diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..ff42f7d
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,12 @@
+{
+ "name": "Pinchflat Dev",
+ "dockerComposeFile": "../docker-compose.yml",
+ "service": "phx",
+ "workspaceFolder": "/app",
+ "shutdownAction": "stopCompose",
+ "customizations": {
+ "vscode": {
+ "extensions": ["phoenixframework.phoenix", "JakeBecker.elixir-ls", "esbenp.prettier-vscode"]
+ }
+ }
+}
diff --git a/.github/workflows/docker_release.yml b/.github/workflows/docker_release.yml
index f80cca4..2e35dbc 100644
--- a/.github/workflows/docker_release.yml
+++ b/.github/workflows/docker_release.yml
@@ -72,7 +72,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and Push
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
with:
context: .
file: ./docker/selfhosted.Dockerfile
diff --git a/.github/workflows/lint_and_test.yml b/.github/workflows/lint_and_test.yml
index 71e4ac8..c174559 100644
--- a/.github/workflows/lint_and_test.yml
+++ b/.github/workflows/lint_and_test.yml
@@ -16,24 +16,28 @@ jobs:
if: "! contains(toJSON(github.event.commits.*.message), '[skip ci]')"
env:
COMPOSE_FILE: ./docker-compose.ci.yml
+ MIX_ENV: test
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Pull prebuilt images
run: docker compose pull
- - name: Setup Docker layer caching
- uses: jpribyl/action-docker-layer-caching@v0.1.1
- continue-on-error: true
- with:
- key: ci-docker-cache-{hash}
- restore-keys: |
- ci-docker-cache-
- layer-ci-docker-cache-
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
- - name: Build and Run Docker image
+ - name: Build docker image
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ file: ./docker/dev.Dockerfile
+ load: true
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
+
+ - name: Run Docker image
run: docker compose up --detach
# NOTE: All exec commands use the -T flag to compensate for
@@ -43,8 +47,7 @@ jobs:
# See https://github.com/actions/runner/issues/241 and https://github.com/docker/compose/issues/8537
- name: Install Elixir and JS deps
run: |
- docker compose exec -T phx yarn install && cd assets && yarn install && cd ..
- docker compose exec -T phx mix deps.get
+ docker compose exec -T phx mix deps.get && yarn install && cd assets && yarn install && cd ..
- name: Create and Migrate database
run: |
diff --git a/.iex.exs b/.iex.exs
index ebe1a24..8533f83 100644
--- a/.iex.exs
+++ b/.iex.exs
@@ -23,3 +23,11 @@ alias Pinchflat.Metadata.MetadataFileHelpers
alias Pinchflat.SlowIndexing.FileFollowerServer
Pinchflat.Release.check_file_permissions()
+
+defmodule IexHelpers do
+ def restart do
+ :init.restart()
+ end
+end
+
+import IexHelpers
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..bdc7a23
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1 @@
+assets/vendor/
diff --git a/tooling/.prettierrc.js b/.prettierrc.js
similarity index 100%
rename from tooling/.prettierrc.js
rename to .prettierrc.js
diff --git a/README.md b/README.md
index a550c87..d99b196 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,6 @@
+> [!IMPORTANT]
+> (2025-02-14) [zakkarry](https://github.com/sponsors/zakkarry), who is a collaborator on [cross-seed](https://github.com/cross-seed/cross-seed) and an extremely helpful community member in general, is facing hard times due to medical debt and family illness. If you're able, please consider [sponsoring him on GitHub](https://github.com/sponsors/zakkarry) or donating via [buymeacoffee](https://tip.ary.dev). Tell him I sent you!
+
[](LICENSE)
-[](https://github.com/kieraneglin/pinchflat/releases)
+[](https://github.com/kieraneglin/pinchflat/releases)
+[](https://discord.gg/j7T6dCuwU4)
[](#)
+[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/kieraneglin/pinchflat)
@@ -32,6 +37,7 @@
- [Portainer](#portainer)
- [Docker](#docker)
- [Environment Variables](#environment-variables)
+ - [A note on reverse proxies](#reverse-proxies)
- [Username and Password (authentication)](https://github.com/kieraneglin/pinchflat/wiki/Username-and-Password)
- [Frequently asked questions](https://github.com/kieraneglin/pinchflat/wiki/Frequently-Asked-Questions)
- [Documentation](https://github.com/kieraneglin/pinchflat/wiki)
@@ -52,7 +58,7 @@ If it doesn't work for your use case, please make a feature request! You can als
- Self-contained - just one Docker container with no external dependencies
- Powerful naming system so content is stored where and how you want it
- Easy-to-use web interface with presets to get you started right away
-- First-class support for media center apps like Plex, Jellyfin, and Kodi
+- First-class support for media center apps like Plex, Jellyfin, and Kodi ([docs](https://github.com/kieraneglin/pinchflat/wiki/Frequently-Asked-Questions#how-do-i-get-media-into-plexjellyfinkodi))
- Supports serving RSS feeds to your favourite podcast app ([docs](https://github.com/kieraneglin/pinchflat/wiki/Podcast-RSS-Feeds))
- Automatically downloads new content from channels and playlists
- Uses a novel approach to download new content more quickly than other apps
@@ -62,7 +68,7 @@ If it doesn't work for your use case, please make a feature request! You can als
- Allows automatically redownloading new media after a set period
- This can help improve the download quality of new content or improve SponsorBlock tags
- Optionally automatically delete old content ([docs](https://github.com/kieraneglin/pinchflat/wiki/Automatically-Delete-Media))
-- Advanced options like setting cutoff dates and filtering by title
+- Advanced options like setting cutoff dates and filtering by title ([docs](https://github.com/kieraneglin/pinchflat/wiki/Frequently-Asked-Questions#i-only-want-certain-videos-from-a-source---how-can-i-only-download-those))
- Reliable hands-off operation
- Can pass cookies to YouTube to download your private playlists ([docs](https://github.com/kieraneglin/pinchflat/wiki/YouTube-Cookies))
- Sponsorblock integration
@@ -123,6 +129,23 @@ docker run \
ghcr.io/kieraneglin/pinchflat:latest
```
+### Podman
+
+The Podman setup is similar to Docker but changes a few flags to run under a User Namespace instead of root. To run Pinchflat under Podman and use the current user's UID/GID for file access, run this:
+
+```
+podman run \
+ --security-opt label=disable \
+ --userns=keep-id --user=$UID \
+ -e TZ=America/Los_Angeles \
+ -p 8945:8945 \
+ -v /host/path/to/config:/config:rw \
+ -v /host/path/to/downloads/:/downloads:rw \
+ ghcr.io/kieraneglin/pinchflat:latest
+```
+
+Using this setup, consider creating a new `pinchflat` user and giving that user ownership of the config and download directories. See [Podman --userns](https://docs.podman.io/en/v4.6.1/markdown/options/userns.container.html) docs.
+
### IMPORTANT: File permissions
You _must_ ensure the host directories you've mounted are writable by the user running the Docker container. If you get a permission error follow the steps it suggests. See [#106](https://github.com/kieraneglin/pinchflat/issues/106) for more.
@@ -130,9 +153,6 @@ You _must_ ensure the host directories you've mounted are writable by the user r
> [!IMPORTANT]
> It's not recommended to run the container as root. Doing so can create permission issues if other apps need to work with the downloaded media.
-> [!TIP]
-> If you need to run any command as root, you can run `su` from the container's shell as there is no password set for the root user.
-
### ADVANCED: Storing Pinchflat config directory on a network share
As pointed out in [#137](https://github.com/kieraneglin/pinchflat/issues/137), SQLite doesn't like being run in WAL mode on network shares. If you're running Pinchflat on a network share, you can disable WAL mode by setting the `JOURNAL_MODE` environment variable to `delete`. This will make Pinchflat run in rollback journal mode which is less performant but should work on network shares.
@@ -144,16 +164,24 @@ If you change this setting and it works well for you, please leave a comment on
### Environment variables
-| Name | Required? | Default | Notes |
-| --------------------- | --------- | ------------------------- | ---------------------------------------------------------------------------------------------- |
-| TZ | No | `UTC` | Must follow IANA TZ format |
-| LOG_LEVEL | No | `debug` | Can be set to `info` |
-| BASIC_AUTH_USERNAME | No | | See [authentication docs](https://github.com/kieraneglin/pinchflat/wiki/Username-and-Password) |
-| BASIC_AUTH_PASSWORD | No | | See [authentication docs](https://github.com/kieraneglin/pinchflat/wiki/Username-and-Password) |
-| EXPOSE_FEED_ENDPOINTS | No | | See [RSS feed docs](https://github.com/kieraneglin/pinchflat/wiki/Podcast-RSS-Feeds) |
-| JOURNAL_MODE | No | `wal` | Set to `delete` if your config directory is stored on a network share (not recommended) |
-| TZ_DATA_DIR | No | `/etc/elixir_tzdata_data` | The container path where the timezone database is stored |
-| BASE_ROUTE_PATH | No | `/` | The base path for route generation. Useful when running behind certain reverse proxies |
+| Name | Required? | Default | Notes |
+| --------------------------- | --------- | ------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- |
+| `TZ` | No | `UTC` | Must follow IANA TZ format |
+| `LOG_LEVEL` | No | `debug` | Can be set to `info` but `debug` is strongly recommended |
+| `UMASK` | No | `022` | Unraid users may want to set this to `000` |
+| `BASIC_AUTH_USERNAME` | No | | See [authentication docs](https://github.com/kieraneglin/pinchflat/wiki/Username-and-Password) |
+| `BASIC_AUTH_PASSWORD` | No | | See [authentication docs](https://github.com/kieraneglin/pinchflat/wiki/Username-and-Password) |
+| `EXPOSE_FEED_ENDPOINTS` | No | `false` | See [RSS feed docs](https://github.com/kieraneglin/pinchflat/wiki/Podcast-RSS-Feeds) |
+| `ENABLE_IPV6` | No | `false` | Setting to _any_ non-blank value will enable IPv6 |
+| `JOURNAL_MODE` | No | `wal` | Set to `delete` if your config directory is stored on a network share (not recommended) |
+| `TZ_DATA_DIR` | No | `/etc/elixir_tzdata_data` | The container path where the timezone database is stored |
+| `BASE_ROUTE_PATH` | No | `/` | The base path for route generation. Useful when running behind certain reverse proxies - prefixes must be stripped. |
+| `YT_DLP_WORKER_CONCURRENCY` | No | `2` | The number of concurrent workers that use `yt-dlp` _per queue_. Set to 1 if you're getting IP limited, otherwise don't touch it |
+| `ENABLE_PROMETHEUS` | No | `false` | Setting to _any_ non-blank value will enable Prometheus. See [docs](https://github.com/kieraneglin/pinchflat/wiki/Prometheus-and-Grafana) |
+
+### Reverse Proxies
+
+Pinchflat makes heavy use of websockets for real-time updates. If you're running Pinchflat behind a reverse proxy then you'll need to make sure it's configured to support websockets.
## EFF donations
@@ -161,9 +189,9 @@ Prior to 2024-05-10, a portion of all donations were given to the [Electronic Fr
The EFF defends your online liberties and [backed](https://github.com/github/dmca/blob/9a85e0f021f7967af80e186b890776a50443f06c/2020/11/2020-11-16-RIAA-reversal-effletter.pdf) `youtube-dl` when Google took them down.
-## Pre-release disclaimer
+## Stability disclaimer
-This is pre-release software and anything can break at any time. I make not guarantees about the stability of this software, forward-compatibility of updates, or integrity (both related to and independent of Pinchflat). Essentially, use at your own risk and expect there will be rough edges for now.
+This software is in active development and anything can break at any time. I make no guarantees about the stability of this software, forward-compatibility of updates, or integrity (both related to and independent of Pinchflat).
## License
diff --git a/assets/js/alpine_helpers.js b/assets/js/alpine_helpers.js
index 56a7d93..9c2367f 100644
--- a/assets/js/alpine_helpers.js
+++ b/assets/js/alpine_helpers.js
@@ -35,3 +35,14 @@ window.markVersionAsSeen = (versionString) => {
window.isVersionSeen = (versionString) => {
return localStorage.getItem('seenVersion') === versionString
}
+
+window.dispatchFor = (elementOrId, eventName, detail = {}) => {
+ const element =
+ typeof elementOrId === 'string' ? document.getElementById(elementOrId) : elementOrId
+
+ // This is needed to ensure the DOM has updated before dispatching the event.
+ // Doing so ensures that the latest DOM state is what's sent to the server
+ setTimeout(() => {
+ element.dispatchEvent(new Event(eventName, { bubbles: true, detail }))
+ }, 0)
+}
diff --git a/assets/js/app.js b/assets/js/app.js
index 2be596d..e6e0219 100644
--- a/assets/js/app.js
+++ b/assets/js/app.js
@@ -39,7 +39,7 @@ let liveSocket = new LiveSocket(document.body.dataset.socketPath, Socket, {
}
},
hooks: {
- supressEnterSubmission: {
+ 'supress-enter-submission': {
mounted() {
this.el.addEventListener('keypress', (event) => {
if (event.key === 'Enter') {
diff --git a/assets/tailwind.config.js b/assets/tailwind.config.js
index 49d9b55..fdb2bc4 100644
--- a/assets/tailwind.config.js
+++ b/assets/tailwind.config.js
@@ -347,6 +347,38 @@ module.exports = {
},
{ values }
)
+ }),
+ plugin(function ({ matchComponents, theme }) {
+ let iconsDir = path.join(__dirname, './vendor/simple-icons')
+ let values = {}
+
+ fs.readdirSync(iconsDir).forEach((file) => {
+ let name = path.basename(file, '.svg')
+ values[name] = { name, fullPath: path.join(iconsDir, file) }
+ })
+
+ matchComponents(
+ {
+ si: ({ name, fullPath }) => {
+ let content = fs
+ .readFileSync(fullPath)
+ .toString()
+ .replace(/\r?\n|\r/g, '')
+ return {
+ [`--si-${name}`]: `url('data:image/svg+xml;utf8,${content}')`,
+ '-webkit-mask': `var(--si-${name})`,
+ mask: `var(--si-${name})`,
+ 'mask-repeat': 'no-repeat',
+ 'background-color': 'currentColor',
+ 'vertical-align': 'middle',
+ display: 'inline-block',
+ width: theme('spacing.5'),
+ height: theme('spacing.5')
+ }
+ }
+ },
+ { values }
+ )
})
]
}
diff --git a/assets/vendor/simple-icons/discord.svg b/assets/vendor/simple-icons/discord.svg
new file mode 100644
index 0000000..9d7796b
--- /dev/null
+++ b/assets/vendor/simple-icons/discord.svg
@@ -0,0 +1 @@
+Discord
\ No newline at end of file
diff --git a/assets/vendor/simple-icons/github.svg b/assets/vendor/simple-icons/github.svg
new file mode 100644
index 0000000..2334976
--- /dev/null
+++ b/assets/vendor/simple-icons/github.svg
@@ -0,0 +1 @@
+GitHub
diff --git a/config/config.exs b/config/config.exs
index ea10c90..f57e0cc 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -10,6 +10,7 @@ import Config
config :pinchflat,
ecto_repos: [Pinchflat.Repo],
generators: [timestamp_type: :utc_datetime],
+ env: config_env(),
# Specifying backend data here makes mocking and local testing SUPER easy
yt_dlp_executable: System.find_executable("yt-dlp"),
apprise_executable: System.find_executable("apprise"),
@@ -41,33 +42,15 @@ config :pinchflat, PinchflatWeb.Endpoint,
adapter: Phoenix.Endpoint.Cowboy2Adapter,
render_errors: [
formats: [html: PinchflatWeb.ErrorHTML, json: PinchflatWeb.ErrorJSON],
- layout: false
+ root_layout: {PinchflatWeb.Layouts, :root},
+ layout: {PinchflatWeb.Layouts, :app}
],
pubsub_server: Pinchflat.PubSub,
live_view: [signing_salt: "/t5878kO"]
config :pinchflat, Oban,
engine: Oban.Engines.Lite,
- repo: Pinchflat.Repo,
- # Keep old jobs for 30 days for display in the UI
- plugins: [
- {Oban.Plugins.Pruner, max_age: 30 * 24 * 60 * 60},
- {Oban.Plugins.Cron,
- crontab: [
- {"0 1 * * *", Pinchflat.Downloading.MediaRetentionWorker},
- {"0 2 * * *", Pinchflat.Downloading.MediaQualityUpgradeWorker}
- ]}
- ],
- # TODO: consider making this an env var or something?
- queues: [
- default: 10,
- fast_indexing: 6,
- media_indexing: 2,
- media_collection_indexing: 2,
- media_fetching: 2,
- local_metadata: 8,
- remote_metadata: 4
- ]
+ repo: Pinchflat.Repo
# Configures the mailer
#
@@ -101,13 +84,19 @@ config :tailwind,
]
# Configures Elixir's Logger
-config :logger, :console,
- format: "$time $metadata[$level] $message\n",
+config :logger, :default_formatter,
+ format: "$date $time $metadata[$level] | $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
+config :pinchflat, Pinchflat.PromEx,
+ disabled: true,
+ manual_metrics_start_delay: :no_delay,
+ drop_metrics_groups: [],
+ metrics_server: :disabled
+
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
diff --git a/config/dev.exs b/config/dev.exs
index 887322b..8b9b793 100644
--- a/config/dev.exs
+++ b/config/dev.exs
@@ -67,7 +67,7 @@ config :pinchflat, PinchflatWeb.Endpoint,
config :pinchflat, dev_routes: true
# Do not include metadata nor timestamps in development logs
-config :logger, :console, format: "[$level] $message\n"
+config :logger, :default_formatter, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
@@ -81,3 +81,5 @@ config :phoenix_live_view, :debug_heex_annotations, true
# Disable swoosh api client as it is only required for production adapters.
config :swoosh, :api_client, false
+
+config :pinchflat, Pinchflat.PromEx, disabled: false
diff --git a/config/runtime.exs b/config/runtime.exs
index 06aa731..5624bfe 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -40,40 +40,79 @@ config :pinchflat, Pinchflat.Repo,
Path.join([:code.priv_dir(:pinchflat), "repo", "extensions", "sqlean-linux-#{system_arch}", "sqlean"])
]
+# Some users may want to increase the number of workers that use yt-dlp to improve speeds
+# Others may want to decrease the number of these workers to lessen the chance of an IP ban
+{yt_dlp_worker_count, _} = Integer.parse(System.get_env("YT_DLP_WORKER_CONCURRENCY", "2"))
+# Used to set the cron for the yt-dlp update worker. The reason for this is
+# to avoid all instances of PF updating yt-dlp at the same time, which 1)
+# could result in rate limiting and 2) gives me time to react if an update
+# breaks something
+%{hour: current_hour, minute: current_minute} = DateTime.utc_now()
+
+config :pinchflat, Oban,
+ queues: [
+ default: 10,
+ fast_indexing: yt_dlp_worker_count,
+ media_collection_indexing: yt_dlp_worker_count,
+ media_fetching: yt_dlp_worker_count,
+ remote_metadata: yt_dlp_worker_count,
+ local_data: 8
+ ],
+ plugins: [
+ # Keep old jobs for 30 days for display in the UI
+ {Oban.Plugins.Pruner, max_age: 30 * 24 * 60 * 60},
+ {Oban.Plugins.Cron,
+ crontab: [
+ {"#{current_minute} #{current_hour} * * *", Pinchflat.YtDlp.UpdateWorker},
+ {"0 1 * * *", Pinchflat.Downloading.MediaRetentionWorker},
+ {"0 2 * * *", Pinchflat.Downloading.MediaQualityUpgradeWorker}
+ ]}
+ ]
+
if config_env() == :prod do
- config_path = "/config"
+ # Various paths. These ones shouldn't be tweaked if running in Docker
+ media_path = System.get_env("MEDIA_PATH", "/downloads")
+ config_path = System.get_env("CONFIG_PATH", "/config")
db_path = System.get_env("DATABASE_PATH", Path.join([config_path, "db", "pinchflat.db"]))
log_path = System.get_env("LOG_PATH", Path.join([config_path, "logs", "pinchflat.log"]))
metadata_path = System.get_env("METADATA_PATH", Path.join([config_path, "metadata"]))
extras_path = System.get_env("EXTRAS_PATH", Path.join([config_path, "extras"]))
+ tmpfile_path = System.get_env("TMPFILE_PATH", Path.join([System.tmp_dir!(), "pinchflat", "data"]))
+ # This one can be changed if you want
+ tz_data_path = System.get_env("TZ_DATA_PATH", Path.join([extras_path, "elixir_tz_data"]))
# For running PF as a podcast host on self-hosted environments
expose_feed_endpoints = String.length(System.get_env("EXPOSE_FEED_ENDPOINTS", "")) > 0
# For testing alternate journal modes (see issue #137)
journal_mode = String.to_existing_atom(System.get_env("JOURNAL_MODE", "wal"))
# For running PF in a subdirectory via a reverse proxy
base_route_path = System.get_env("BASE_ROUTE_PATH", "/")
+ enable_ipv6 = String.length(System.get_env("ENABLE_IPV6", "")) > 0
+ enable_prometheus = String.length(System.get_env("ENABLE_PROMETHEUS", "")) > 0
config :logger, level: String.to_existing_atom(System.get_env("LOG_LEVEL", "debug"))
config :pinchflat,
yt_dlp_executable: System.find_executable("yt-dlp"),
apprise_executable: System.find_executable("apprise"),
- media_directory: "/downloads",
+ media_directory: media_path,
metadata_directory: metadata_path,
extras_directory: extras_path,
- tmpfile_directory: Path.join([System.tmp_dir!(), "pinchflat", "data"]),
+ tmpfile_directory: tmpfile_path,
dns_cluster_query: System.get_env("DNS_CLUSTER_QUERY"),
expose_feed_endpoints: expose_feed_endpoints,
- timezone: System.get_env("TIMEZONE") || System.get_env("TZ") || "UTC",
+ # This is configured in application.ex
+ timezone: "UTC",
log_path: log_path,
base_route_path: base_route_path
- config :tzdata, :data_dir, System.get_env("TZ_DATA_DIR", "/etc/elixir_tzdata_data")
+ config :tzdata, :data_dir, tz_data_path
config :pinchflat, Pinchflat.Repo,
database: db_path,
journal_mode: journal_mode
+ config :pinchflat, Pinchflat.PromEx, disabled: !enable_prometheus
+
# The secret key base is used to sign/encrypt cookies and other secrets.
# A default value is used in config/dev.exs and config/test.exs but you
# want to use a different value for prod and you most likely don't want
@@ -106,7 +145,7 @@ if config_env() == :prod do
# Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
# See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
# for details about using IPv6 vs IPv4 and loopback vs public addresses.
- ip: {0, 0, 0, 0},
+ ip: if(enable_ipv6, do: {0, 0, 0, 0, 0, 0, 0, 0}, else: {0, 0, 0, 0}),
port: String.to_integer(System.get_env("PORT") || "4000")
],
url: [path: base_route_path],
diff --git a/docker-compose.ci.yml b/docker-compose.ci.yml
index 4ada043..bf26c4b 100644
--- a/docker-compose.ci.yml
+++ b/docker-compose.ci.yml
@@ -7,6 +7,9 @@ services:
- MIX_ENV=test
volumes:
- '.:/app'
+ # These lines ensure the deps can be saved as build artifacts for caching
+ - '/app/deps'
+ - '/app/_build'
ports:
- '4008:4008'
command: tail -F /dev/null
diff --git a/docker-compose.yml b/docker-compose.yml
index aad86e8..e193a16 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -10,5 +10,3 @@ services:
command: bash -c "chmod +x docker/docker-run.dev.sh && docker/docker-run.dev.sh"
stdin_open: true
tty: true
- env_file:
- - .env
diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile
index e7c54d4..04a3e13 100644
--- a/docker/dev.Dockerfile
+++ b/docker/dev.Dockerfile
@@ -1,6 +1,7 @@
-ARG ELIXIR_VERSION=1.17.0
-ARG OTP_VERSION=26.2.5
-ARG DEBIAN_VERSION=bookworm-20240612-slim
+ARG ELIXIR_VERSION=1.18.4
+ARG OTP_VERSION=27.2.4
+ARG DEBIAN_VERSION=bookworm-20250428-slim
+
ARG DEV_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
FROM ${DEV_IMAGE}
@@ -9,10 +10,10 @@ ARG TARGETPLATFORM
RUN echo "Building for ${TARGETPLATFORM:?}"
# Install debian packages
-RUN apt-get update -qq
-RUN apt-get install -y inotify-tools curl git openssh-client jq \
- python3 python3-setuptools python3-wheel python3-dev pipx \
- python3-mutagen locales procps build-essential graphviz
+RUN apt-get update -qq && \
+ apt-get install -y inotify-tools curl git openssh-client jq \
+ python3 python3-setuptools python3-wheel python3-dev pipx \
+ python3-mutagen locales procps build-essential graphviz zsh unzip
# Install ffmpeg
RUN export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
@@ -23,39 +24,48 @@ RUN export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
tar -xf /tmp/ffmpeg.tar.xz --strip-components=2 --no-anchored -C /usr/bin/ "ffmpeg" && \
tar -xf /tmp/ffmpeg.tar.xz --strip-components=2 --no-anchored -C /usr/bin/ "ffprobe"
-# Install nodejs
-RUN curl -sL https://deb.nodesource.com/setup_20.x -o nodesource_setup.sh
-RUN bash nodesource_setup.sh
-RUN apt-get install nodejs
-RUN npm install -g yarn
-
-# Install baseline Elixir packages
-RUN mix local.hex --force
-RUN mix local.rebar --force
-
-# Download and update YT-DLP
-RUN curl -L https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp -o /usr/local/bin/yt-dlp
-RUN chmod a+rx /usr/local/bin/yt-dlp
-RUN yt-dlp -U
-
-# Install Apprise
-RUN export PIPX_HOME=/opt/pipx && \
- export PIPX_BIN_DIR=/usr/local/bin && \
- pipx install apprise
+# Install nodejs and Yarn
+RUN curl -sL https://deb.nodesource.com/setup_20.x -o nodesource_setup.sh && \
+ bash nodesource_setup.sh && \
+ apt-get install -y nodejs && \
+ npm install -g yarn && \
+ # Install baseline Elixir packages
+ mix local.hex --force && \
+ mix local.rebar --force && \
+ # Install Deno - required for YouTube downloads (See yt-dlp#14404)
+ curl -fsSL https://deno.land/install.sh | DENO_INSTALL=/usr/local sh -s -- -y --no-modify-path && \
+ # Download and update YT-DLP
+ export YT_DLP_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
+ "linux/amd64") echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux" ;; \
+ "linux/arm64") echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64" ;; \
+ *) echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux" ;; esac) && \
+ curl -L ${YT_DLP_DOWNLOAD} -o /usr/local/bin/yt-dlp && \
+ chmod a+rx /usr/local/bin/yt-dlp && \
+ yt-dlp -U && \
+ # Install Apprise
+ export PIPX_HOME=/opt/pipx && \
+ export PIPX_BIN_DIR=/usr/local/bin && \
+ pipx install apprise && \
+ # Set up ZSH tools
+ chsh -s $(which zsh) && \
+ sh -c "$(curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
# Set the locale
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
-ENV LANG en_US.UTF-8
-ENV LANGUAGE en_US:en
-ENV LC_ALL en_US.UTF-8
+ENV LANG=en_US.UTF-8
+ENV LANGUAGE=en_US:en
+ENV LC_ALL=en_US.UTF-8
-# Create app directory and copy the Elixir projects into it.
WORKDIR /app
+
+COPY mix.exs mix.lock ./
+# Install Elixir deps
+# NOTE: this has to be before the bulk copy to ensure that deps are cached
+RUN MIX_ENV=dev mix deps.get && MIX_ENV=dev mix deps.compile
+RUN MIX_ENV=test mix deps.get && MIX_ENV=test mix deps.compile
+
COPY . ./
-# Install Elixir deps
-# RUN mix archive.install github hexpm/hex branch latest
-RUN mix deps.get
# Gives us iex shell history
ENV ERL_AFLAGS="-kernel shell_history enabled"
diff --git a/docker/docker-run.dev.sh b/docker/docker-run.dev.sh
index ddae66f..8ebb1a8 100755
--- a/docker/docker-run.dev.sh
+++ b/docker/docker-run.dev.sh
@@ -2,12 +2,12 @@
set -e
-# Ensure the app's deps are installed
+echo "\nInstalling Elixir deps..."
mix deps.get
-# Install JS deps
-echo "\nInstalling JS..."
-cd assets && yarn install
+# Install both project-level and assets-level JS dependencies
+echo "\nInstalling JS deps..."
+yarn install && cd assets && yarn install
cd ..
# Potentially Set up the database
diff --git a/docker/selfhosted.Dockerfile b/docker/selfhosted.Dockerfile
index 0449c88..17f7af7 100644
--- a/docker/selfhosted.Dockerfile
+++ b/docker/selfhosted.Dockerfile
@@ -1,13 +1,13 @@
# Find eligible builder and runner images on Docker Hub. We use Ubuntu/Debian
# instead of Alpine to avoid DNS resolution issues in production.
-ARG ELIXIR_VERSION=1.17.0
-ARG OTP_VERSION=26.2.5
-ARG DEBIAN_VERSION=bookworm-20240612-slim
+ARG ELIXIR_VERSION=1.18.4
+ARG OTP_VERSION=27.2.4
+ARG DEBIAN_VERSION=bookworm-20250428-slim
ARG BUILDER_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
ARG RUNNER_IMAGE="debian:${DEBIAN_VERSION}"
-FROM ${BUILDER_IMAGE} as builder
+FROM ${BUILDER_IMAGE} AS builder
ARG TARGETPLATFORM
RUN echo "Building for ${TARGETPLATFORM:?}"
@@ -27,10 +27,10 @@ RUN apt-get update -y && \
# Hex and Rebar
mix local.hex --force && \
mix local.rebar --force && \
- # FFmpeg
+ # FFmpeg (latest build that doesn't cause an illegal instruction error for some users - see #347)
export FFMPEG_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
- "linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz" ;; \
- "linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linuxarm64-gpl.tar.xz" ;; \
+ "linux/amd64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-2024-07-30-14-10/ffmpeg-N-116468-g0e09f6d690-linux64-gpl.tar.xz" ;; \
+ "linux/arm64") echo "https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-2024-07-30-14-10/ffmpeg-N-116468-g0e09f6d690-linuxarm64-gpl.tar.xz" ;; \
*) echo "" ;; esac) && \
curl -L ${FFMPEG_DOWNLOAD} --output /tmp/ffmpeg.tar.xz && \
tar -xf /tmp/ffmpeg.tar.xz --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" && \
@@ -73,6 +73,7 @@ RUN mix release
FROM ${RUNNER_IMAGE}
+ARG TARGETPLATFORM
ARG PORT=8945
COPY --from=builder ./usr/local/bin/ffmpeg /usr/bin/ffmpeg
@@ -88,18 +89,27 @@ RUN apt-get update -y && \
ca-certificates \
python3-mutagen \
curl \
+ zip \
openssh-client \
nano \
python3 \
pipx \
jq \
+ # unzip is needed for Deno
+ unzip \
procps && \
+ # Install Deno - required for YouTube downloads (See yt-dlp#14404)
+ curl -fsSL https://deno.land/install.sh | DENO_INSTALL=/usr/local sh -s -- -y --no-modify-path && \
# Apprise
export PIPX_HOME=/opt/pipx && \
export PIPX_BIN_DIR=/usr/local/bin && \
pipx install apprise && \
# yt-dlp
- curl -L https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp -o /usr/local/bin/yt-dlp && \
+ export YT_DLP_DOWNLOAD=$(case ${TARGETPLATFORM:-linux/amd64} in \
+ "linux/amd64") echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux" ;; \
+ "linux/arm64") echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64" ;; \
+ *) echo "https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux" ;; esac) && \
+ curl -L ${YT_DLP_DOWNLOAD} -o /usr/local/bin/yt-dlp && \
chmod a+rx /usr/local/bin/yt-dlp && \
yt-dlp -U && \
# Set the locale
@@ -109,36 +119,27 @@ RUN apt-get update -y && \
rm -rf /var/lib/apt/lists/*
# More locale setup
-ENV LANG en_US.UTF-8
-ENV LANGUAGE en_US:en
-ENV LC_ALL en_US.UTF-8
+ENV LANG=en_US.UTF-8
+ENV LANGUAGE=en_US:en
+ENV LC_ALL=en_US.UTF-8
WORKDIR "/app"
# Set up data volumes
-RUN mkdir /config /downloads /etc/elixir_tzdata_data && chmod ugo+rw /etc/elixir_tzdata_data
+RUN mkdir -p /config /downloads /etc/elixir_tzdata_data /etc/yt-dlp/plugins && \
+ chmod ugo+rw /etc/elixir_tzdata_data /etc/yt-dlp /etc/yt-dlp/plugins /usr/local/bin /usr/local/bin/yt-dlp
# set runner ENV
ENV MIX_ENV="prod"
ENV PORT=${PORT}
ENV RUN_CONTEXT="selfhosted"
+ENV UMASK=022
EXPOSE ${PORT}
# Only copy the final release from the build stage
COPY --from=builder /app/_build/${MIX_ENV}/rel/pinchflat ./
-# NEVER do this if you're running in an environment where you don't trust the user
-# (ie: most environments). This is only acceptable in a self-hosted environment.
-# The user could just run the whole container as root and bypass this anyway so
-# it's not a huge deal.
-# This removes the root password to allow users to assume root if needed. This is
-# preferrable to running the whole container as root so that the files/directories
-# created by the app aren't owned by root and are therefore easier for other users
-# and processes to interact with. If you want to just run the whole container as
-# root, use --user 0:0 or something.
-RUN passwd -d root
-
-HEALTHCHECK --interval=120s --start-period=10s \
+HEALTHCHECK --interval=30s --start-period=15s \
CMD curl --fail http://localhost:${PORT}/healthcheck || exit 1
# Start the app
diff --git a/lib/pinchflat/application.ex b/lib/pinchflat/application.ex
index 49adcf0..3f823f4 100644
--- a/lib/pinchflat/application.ex
+++ b/lib/pinchflat/application.ex
@@ -4,10 +4,18 @@ defmodule Pinchflat.Application do
@moduledoc false
use Application
+ require Logger
@impl true
def start(_type, _args) do
- children = [
+ check_and_update_timezone()
+ attach_oban_telemetry()
+ Logger.add_handlers(:pinchflat)
+
+ # See https://hexdocs.pm/elixir/Supervisor.html
+ # for other strategies and supported options
+ [
+ Pinchflat.PromEx,
PinchflatWeb.Telemetry,
Pinchflat.Repo,
# Must be before startup tasks
@@ -20,17 +28,11 @@ defmodule Pinchflat.Application do
{Finch, name: Pinchflat.Finch},
# Start a worker by calling: Pinchflat.Worker.start_link(arg)
# {Pinchflat.Worker, arg},
- # Start to serve requests, typically the last entry
- PinchflatWeb.Endpoint
+ # Start to serve requests, typically the last entry (except for the post-boot tasks)
+ PinchflatWeb.Endpoint,
+ Pinchflat.Boot.PostBootStartupTasks
]
-
- attach_oban_telemetry()
- Logger.add_handlers(:pinchflat)
-
- # See https://hexdocs.pm/elixir/Supervisor.html
- # for other strategies and supported options
- opts = [strategy: :one_for_one, name: Pinchflat.Supervisor]
- Supervisor.start_link(children, opts)
+ |> Supervisor.start_link(strategy: :one_for_one, name: Pinchflat.Supervisor)
end
# Tell Phoenix to update the endpoint configuration
@@ -47,4 +49,20 @@ defmodule Pinchflat.Application do
:ok = Oban.Telemetry.attach_default_logger()
:telemetry.attach_many("job-telemetry-broadcast", events, &PinchflatWeb.Telemetry.job_state_change_broadcast/4, [])
end
+
+ # This has to be here (rather than runtime.exs) since the `tzdata` application
+ # has to be started before we can check the timezone
+ defp check_and_update_timezone do
+ attempted_timezone = System.get_env("TIMEZONE") || System.get_env("TZ") || "UTC"
+
+ valid_timezone =
+ if Tzdata.zone_exists?(attempted_timezone) do
+ attempted_timezone
+ else
+ Logger.warning("Invalid timezone #{attempted_timezone}, defaulting to UTC")
+ "UTC"
+ end
+
+ Application.put_env(:pinchflat, :timezone, valid_timezone)
+ end
end
diff --git a/lib/pinchflat/boot/post_boot_startup_tasks.ex b/lib/pinchflat/boot/post_boot_startup_tasks.ex
new file mode 100644
index 0000000..d6ae6eb
--- /dev/null
+++ b/lib/pinchflat/boot/post_boot_startup_tasks.ex
@@ -0,0 +1,46 @@
+defmodule Pinchflat.Boot.PostBootStartupTasks do
+ @moduledoc """
+ This module is responsible for running startup tasks on app boot
+ AFTER all other boot steps have taken place and the app is ready to serve requests.
+
+ It's a GenServer because that plays REALLY nicely with the existing
+ Phoenix supervision tree.
+ """
+
+ alias Pinchflat.YtDlp.UpdateWorker, as: YtDlpUpdateWorker
+
+ # restart: :temporary means that this process will never be restarted (ie: will run once and then die)
+ use GenServer, restart: :temporary
+ import Ecto.Query, warn: false
+
+ def start_link(opts \\ []) do
+ GenServer.start_link(__MODULE__, %{env: Application.get_env(:pinchflat, :env)}, opts)
+ end
+
+ @doc """
+ Runs post-boot application startup tasks.
+
+ Any code defined here will run every time the application starts. You must
+ make sure that the code is idempotent and safe to run multiple times.
+
+ This is a good place to set up default settings, create initial records, stuff like that.
+ Should be fast - anything with the potential to be slow should be kicked off as a job instead.
+ """
+ @impl true
+ def init(%{env: :test} = state) do
+ # Do nothing _as part of the app bootup process_.
+ # Since bootup calls `start_link` and that's where the `env` state is injected,
+ # you can still call `.init()` manually to run these tasks for testing purposes
+ {:ok, state}
+ end
+
+ def init(state) do
+ update_yt_dlp()
+
+ {:ok, state}
+ end
+
+ defp update_yt_dlp do
+ YtDlpUpdateWorker.kickoff()
+ end
+end
diff --git a/lib/pinchflat/boot/post_job_startup_tasks.ex b/lib/pinchflat/boot/post_job_startup_tasks.ex
index 5043a25..6eba701 100644
--- a/lib/pinchflat/boot/post_job_startup_tasks.ex
+++ b/lib/pinchflat/boot/post_job_startup_tasks.ex
@@ -1,7 +1,7 @@
defmodule Pinchflat.Boot.PostJobStartupTasks do
@moduledoc """
This module is responsible for running startup tasks on app boot
- AFTER the job runner has initiallized.
+ AFTER the job runner has initialized.
It's a GenServer because that plays REALLY nicely with the existing
Phoenix supervision tree.
@@ -12,7 +12,7 @@ defmodule Pinchflat.Boot.PostJobStartupTasks do
import Ecto.Query, warn: false
def start_link(opts \\ []) do
- GenServer.start_link(__MODULE__, %{}, opts)
+ GenServer.start_link(__MODULE__, %{env: Application.get_env(:pinchflat, :env)}, opts)
end
@doc """
@@ -25,6 +25,13 @@ defmodule Pinchflat.Boot.PostJobStartupTasks do
Should be fast - anything with the potential to be slow should be kicked off as a job instead.
"""
@impl true
+ def init(%{env: :test} = state) do
+ # Do nothing _as part of the app bootup process_.
+ # Since bootup calls `start_link` and that's where the `env` state is injected,
+ # you can still call `.init()` manually to run these tasks for testing purposes
+ {:ok, state}
+ end
+
def init(state) do
# Nothing at the moment!
diff --git a/lib/pinchflat/boot/pre_job_startup_tasks.ex b/lib/pinchflat/boot/pre_job_startup_tasks.ex
index 85fb399..5035e35 100644
--- a/lib/pinchflat/boot/pre_job_startup_tasks.ex
+++ b/lib/pinchflat/boot/pre_job_startup_tasks.ex
@@ -16,8 +16,10 @@ defmodule Pinchflat.Boot.PreJobStartupTasks do
alias Pinchflat.Settings
alias Pinchflat.Utils.FilesystemUtils
+ alias Pinchflat.Lifecycle.UserScripts.CommandRunner, as: UserScriptRunner
+
def start_link(opts \\ []) do
- GenServer.start_link(__MODULE__, %{}, opts)
+ GenServer.start_link(__MODULE__, %{env: Application.get_env(:pinchflat, :env)}, opts)
end
@doc """
@@ -30,12 +32,20 @@ defmodule Pinchflat.Boot.PreJobStartupTasks do
Should be fast - anything with the potential to be slow should be kicked off as a job instead.
"""
@impl true
+ def init(%{env: :test} = state) do
+ # Do nothing _as part of the app bootup process_.
+ # Since bootup calls `start_link` and that's where the `env` state is injected,
+ # you can still call `.init()` manually to run these tasks for testing purposes
+ {:ok, state}
+ end
+
def init(state) do
ensure_tmpfile_directory()
reset_executing_jobs()
create_blank_yt_dlp_files()
create_blank_user_script_file()
apply_default_settings()
+ run_app_init_script()
{:ok, state}
end
@@ -95,6 +105,12 @@ defmodule Pinchflat.Boot.PreJobStartupTasks do
Settings.set(apprise_version: apprise_version)
end
+ defp run_app_init_script do
+ runner = Application.get_env(:pinchflat, :user_script_runner, UserScriptRunner)
+
+ runner.run(:app_init, %{})
+ end
+
defp yt_dlp_runner do
Application.get_env(:pinchflat, :yt_dlp_runner)
end
diff --git a/lib/pinchflat/downloading/download_option_builder.ex b/lib/pinchflat/downloading/download_option_builder.ex
index f1817f4..ab2b56a 100644
--- a/lib/pinchflat/downloading/download_option_builder.ex
+++ b/lib/pinchflat/downloading/download_option_builder.ex
@@ -4,10 +4,10 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
"""
alias Pinchflat.Sources
- alias Pinchflat.Settings
alias Pinchflat.Sources.Source
alias Pinchflat.Media.MediaItem
alias Pinchflat.Downloading.OutputPathBuilder
+ alias Pinchflat.Downloading.QualityOptionBuilder
alias Pinchflat.Utils.FilesystemUtils, as: FSUtils
@@ -34,21 +34,38 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
@doc """
Builds the output path for yt-dlp to download media based on the given source's
- media profile. Uses the source's override output path template if it exists.
+ or media_item's media profile. Uses the source's override output path template if it exists.
Accepts a %MediaItem{} or %Source{} struct. If a %Source{} struct is passed, it
will use a default %MediaItem{} struct with the given source.
Returns binary()
"""
+ def build_output_path_for(%Source{} = source_with_preloads) do
+ build_output_path_for(%MediaItem{source: source_with_preloads})
+ end
+
def build_output_path_for(%MediaItem{} = media_item_with_preloads) do
output_path_template = Sources.output_path_template(media_item_with_preloads.source)
build_output_path(output_path_template, media_item_with_preloads)
end
- def build_output_path_for(%Source{} = source_with_preloads) do
- build_output_path_for(%MediaItem{source: source_with_preloads})
+ @doc """
+ Builds the quality options for yt-dlp to download media based on the given source's
+ or media_item's media profile. Useful for helping predict final filepath of downloaded
+ media.
+
+ Returns [Keyword.t()]
+ """
+ def build_quality_options_for(%Source{} = source_with_preloads) do
+ build_quality_options_for(%MediaItem{source: source_with_preloads})
+ end
+
+ def build_quality_options_for(%MediaItem{} = media_item_with_preloads) do
+ media_profile = media_item_with_preloads.source.media_profile
+
+ quality_options(media_profile)
end
defp default_options(override_opts) do
@@ -74,6 +91,9 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
{{:download_auto_subs, true}, %{download_subs: true}} ->
acc ++ [:write_auto_subs]
+ {{:download_auto_subs, true}, %{embed_subs: true}} ->
+ acc ++ [:write_auto_subs]
+
{{:embed_subs, true}, %{preferred_resolution: pr}} when pr != :audio ->
acc ++ [:embed_subs]
@@ -122,26 +142,7 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
end
defp quality_options(media_profile) do
- vcodec = Settings.get!(:video_codec_preference)
- acodec = Settings.get!(:audio_codec_preference)
-
- case media_profile.preferred_resolution do
- # Also be aware that :audio disabled all embedding options for subtitles
- :audio ->
- [:extract_audio, format_sort: "+acodec:#{acodec}"]
-
- resolution_atom ->
- {resolution_string, _} =
- resolution_atom
- |> Atom.to_string()
- |> Integer.parse()
-
- [
- # Since Plex doesn't support reading metadata from MKV
- remux_video: "mp4",
- format_sort: "res:#{resolution_string},+codec:#{vcodec}:#{acodec}"
- ]
- end
+ QualityOptionBuilder.build(media_profile)
end
defp sponsorblock_options(media_profile) do
@@ -151,6 +152,7 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
case {behaviour, categories} do
{_, []} -> []
{:remove, _} -> [sponsorblock_remove: Enum.join(categories, ",")]
+ {:mark, _} -> [sponsorblock_mark: Enum.join(categories, ",")]
{:disabled, _} -> []
end
end
@@ -199,14 +201,15 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
source = media_item_with_preloads.source
%{
+ "media_item_id" => to_string(media_item_with_preloads.id),
+ "source_id" => to_string(source.id),
+ "media_profile_id" => to_string(source.media_profile_id),
"source_custom_name" => source.custom_name,
"source_collection_id" => source.collection_id,
"source_collection_name" => source.collection_name,
"source_collection_type" => to_string(source.collection_type),
- "media_upload_date_index" =>
- media_item_with_preloads.upload_date_index
- |> to_string()
- |> String.pad_leading(2, "0")
+ "media_playlist_index" => pad_int(media_item_with_preloads.playlist_index),
+ "media_upload_date_index" => pad_int(media_item_with_preloads.upload_date_index)
}
end
@@ -223,6 +226,12 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilder do
|> build_output_path(media_item_with_preloads)
end
+ defp pad_int(integer, count \\ 2, padding \\ "0") do
+ integer
+ |> to_string()
+ |> String.pad_leading(count, padding)
+ end
+
defp base_directory do
Application.get_env(:pinchflat, :media_directory)
end
diff --git a/lib/pinchflat/downloading/downloading_helpers.ex b/lib/pinchflat/downloading/downloading_helpers.ex
index cb679a8..eae187c 100644
--- a/lib/pinchflat/downloading/downloading_helpers.ex
+++ b/lib/pinchflat/downloading/downloading_helpers.ex
@@ -27,13 +27,15 @@ defmodule Pinchflat.Downloading.DownloadingHelpers do
Returns :ok
"""
- def enqueue_pending_download_tasks(%Source{download_media: true} = source) do
+ def enqueue_pending_download_tasks(source, job_opts \\ [])
+
+ def enqueue_pending_download_tasks(%Source{download_media: true} = source, job_opts) do
source
|> Media.list_pending_media_items_for()
- |> Enum.each(&MediaDownloadWorker.kickoff_with_task/1)
+ |> Enum.each(&MediaDownloadWorker.kickoff_with_task(&1, %{}, job_opts))
end
- def enqueue_pending_download_tasks(%Source{download_media: false}) do
+ def enqueue_pending_download_tasks(%Source{download_media: false}, _job_opts) do
:ok
end
@@ -55,13 +57,13 @@ defmodule Pinchflat.Downloading.DownloadingHelpers do
Returns {:ok, %Task{}} | {:error, :should_not_download} | {:error, any()}
"""
- def kickoff_download_if_pending(%MediaItem{} = media_item) do
+ def kickoff_download_if_pending(%MediaItem{} = media_item, job_opts \\ []) do
media_item = Repo.preload(media_item, :source)
if media_item.source.download_media && Media.pending_download?(media_item) do
Logger.info("Kicking off download for media item ##{media_item.id} (#{media_item.media_id})")
- MediaDownloadWorker.kickoff_with_task(media_item)
+ MediaDownloadWorker.kickoff_with_task(media_item, %{}, job_opts)
else
{:error, :should_not_download}
end
@@ -91,8 +93,7 @@ defmodule Pinchflat.Downloading.DownloadingHelpers do
[m, s, mp],
^MediaQuery.for_source(source) and
^MediaQuery.downloaded() and
- not (^MediaQuery.download_prevented()) and
- not (^MediaQuery.culled())
+ not (^MediaQuery.download_prevented())
)
)
|> Repo.all()
diff --git a/lib/pinchflat/downloading/media_download_worker.ex b/lib/pinchflat/downloading/media_download_worker.ex
index 42c5ca4..a0fbceb 100644
--- a/lib/pinchflat/downloading/media_download_worker.ex
+++ b/lib/pinchflat/downloading/media_download_worker.ex
@@ -3,6 +3,7 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
use Oban.Worker,
queue: :media_fetching,
+ priority: 5,
unique: [period: :infinity, states: [:available, :scheduled, :retryable, :executing]],
tags: ["media_item", "media_fetching", "show_in_dashboard"]
@@ -12,6 +13,7 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
alias Pinchflat.Tasks
alias Pinchflat.Repo
alias Pinchflat.Media
+ alias Pinchflat.Media.FileSyncing
alias Pinchflat.Downloading.MediaDownloader
alias Pinchflat.Lifecycle.UserScripts.CommandRunner, as: UserScriptRunner
@@ -39,20 +41,16 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
- `quality_upgrade?`: re-downloads media, including the video. Does not force download
if the source is set to not download media
- Returns :ok | {:ok, %MediaItem{}} | {:error, any, ...any}
+ Returns :ok | {:error, any, ...any}
"""
@impl Oban.Worker
def perform(%Oban.Job{args: %{"id" => media_item_id} = args}) do
should_force = Map.get(args, "force", false)
is_quality_upgrade = Map.get(args, "quality_upgrade?", false)
- media_item =
- media_item_id
- |> Media.get_media_item!()
- |> Repo.preload(:source)
+ media_item = fetch_and_run_prevent_download_user_script(media_item_id)
- # If the source or media item is set to not download media, perform a no-op unless forced
- if (media_item.source.download_media && !media_item.prevent_download) || should_force do
+ if should_download_media?(media_item, should_force, is_quality_upgrade) do
download_media_and_schedule_jobs(media_item, is_quality_upgrade, should_force)
else
:ok
@@ -62,6 +60,34 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
Ecto.StaleEntryError -> Logger.info("#{__MODULE__} discarded: media item #{media_item_id} stale")
end
+ # If this is a quality upgrade, only check if the source is set to download media
+ # or that the media item's download hasn't been prevented
+ defp should_download_media?(media_item, should_force, true = _is_quality_upgrade) do
+ (media_item.source.download_media && !media_item.prevent_download) || should_force
+ end
+
+ # If it's not a quality upgrade, additionally check if the media item is pending download
+ defp should_download_media?(media_item, should_force, _is_quality_upgrade) do
+ source = media_item.source
+ is_pending = Media.pending_download?(media_item)
+
+ (is_pending && source.download_media && !media_item.prevent_download) || should_force
+ end
+
+ # If a user script exists and, when run, returns a non-zero exit code, prevent this and all future downloads
+ # of the media item.
+ defp fetch_and_run_prevent_download_user_script(media_item_id) do
+ media_item = Media.get_media_item!(media_item_id)
+
+ {:ok, media_item} =
+ case run_user_script(:media_pre_download, media_item) do
+ {:ok, _, exit_code} when exit_code != 0 -> Media.update_media_item(media_item, %{prevent_download: true})
+ _ -> {:ok, media_item}
+ end
+
+ Repo.preload(media_item, :source)
+ end
+
defp download_media_and_schedule_jobs(media_item, is_quality_upgrade, should_force) do
overwrite_behaviour = if should_force || is_quality_upgrade, do: :force_overwrites, else: :no_force_overwrites
override_opts = [overwrite_behaviour: overwrite_behaviour]
@@ -74,14 +100,18 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
media_redownloaded_at: get_redownloaded_at(is_quality_upgrade)
})
- :ok = run_user_script(updated_media_item)
+ :ok = FileSyncing.delete_outdated_files(media_item, updated_media_item)
+ run_user_script(:media_downloaded, updated_media_item)
- {:ok, updated_media_item}
+ :ok
- {:recovered, _} ->
+ {:recovered, _media_item, _message} ->
{:error, :retry}
- {:error, message} ->
+ {:error, :unsuitable_for_download, _message} ->
+ {:ok, :non_retry}
+
+ {:error, _error_atom, message} ->
action_on_error(message)
end
end
@@ -99,7 +129,11 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
defp action_on_error(message) do
# This will attempt re-download at the next indexing, but it won't be retried
# immediately as part of job failure logic
- non_retryable_errors = ["Video unavailable"]
+ non_retryable_errors = [
+ "Video unavailable",
+ "Sign in to confirm",
+ "This video is available to this channel's members"
+ ]
if String.contains?(to_string(message), non_retryable_errors) do
Logger.error("yt-dlp download will not be retried: #{inspect(message)}")
@@ -112,9 +146,9 @@ defmodule Pinchflat.Downloading.MediaDownloadWorker do
# NOTE: I like this pattern of using the default value so that I don't have to
# define it in config.exs (and friends). Consider using this elsewhere.
- defp run_user_script(media_item) do
+ defp run_user_script(event, media_item) do
runner = Application.get_env(:pinchflat, :user_script_runner, UserScriptRunner)
- runner.run(:media_downloaded, media_item)
+ runner.run(event, media_item)
end
end
diff --git a/lib/pinchflat/downloading/media_downloader.ex b/lib/pinchflat/downloading/media_downloader.ex
index eb7a09f..1a1ee2f 100644
--- a/lib/pinchflat/downloading/media_downloader.ex
+++ b/lib/pinchflat/downloading/media_downloader.ex
@@ -9,7 +9,9 @@ defmodule Pinchflat.Downloading.MediaDownloader do
alias Pinchflat.Repo
alias Pinchflat.Media
+ alias Pinchflat.Sources
alias Pinchflat.Media.MediaItem
+ alias Pinchflat.Utils.StringUtils
alias Pinchflat.Metadata.NfoBuilder
alias Pinchflat.Metadata.MetadataParser
alias Pinchflat.Metadata.MetadataFileHelpers
@@ -20,16 +22,57 @@ defmodule Pinchflat.Downloading.MediaDownloader do
@doc """
Downloads media for a media item, updating the media item based on the metadata
- returned by yt-dlp. Also saves the entire metadata response to the associated
- media_metadata record.
+ returned by yt-dlp. Encountered errors are saved to the Media Item record. Saves
+ the entire metadata response to the associated media_metadata record.
- NOTE: related methods (like the download worker) won't download if the media item's source
+ NOTE: related methods (like the download worker) won't download if the media item's source
is set to not download media. However, I'm not enforcing that here since I need this for testing.
This may change in the future but I'm not stressed.
- Returns {:ok, %MediaItem{}} | {:error, any, ...any}
+ Returns {:ok, %MediaItem{}} | {:error, atom(), String.t()} | {:recovered, %MediaItem{}, String.t()}
"""
def download_for_media_item(%MediaItem{} = media_item, override_opts \\ []) do
+ case attempt_download_and_update_for_media_item(media_item, override_opts) do
+ {:ok, media_item} ->
+ # Returns {:ok, %MediaItem{}}
+ Media.update_media_item(media_item, %{last_error: nil})
+
+ {:error, error_atom, message} ->
+ Media.update_media_item(media_item, %{last_error: StringUtils.wrap_string(message)})
+
+ {:error, error_atom, message}
+
+ {:recovered, media_item, message} ->
+ {:ok, updated_media_item} = Media.update_media_item(media_item, %{last_error: StringUtils.wrap_string(message)})
+
+ {:recovered, updated_media_item, message}
+ end
+ end
+
+ # Looks complicated, but here's the key points:
+ # - download_with_options runs a pre-check to see if the media item is suitable for download.
+ # - If the media item fails the precheck, it returns {:error, :unsuitable_for_download, message}
+ # - However, if the precheck fails in a way that we think can be fixed by using cookies, we retry with cookies
+ # and return the result of that
+ # - If the precheck passes but the download fails, it normally returns {:error, :download_failed, message}
+ # - However, there are some errors we can recover from (eg: failure to communicate with SponsorBlock).
+ # In this case, we attempt the download anyway and update the media item with what details we do have.
+ # This case returns {:recovered, updated_media_item, message}
+ # - If we attempt a retry but it fails, we return {:error, :unrecoverable, message}
+ # - If there is an unknown error unrelated to the above, we return {:error, :unknown, message}
+ # - Finally, if there is no error, we update the media item with the parsed JSON and return {:ok, updated_media_item}
+ #
+ # Restated, here are the return values for each case:
+ # - On success: {:ok, updated_media_item}
+ # - On initial failure but successfully recovered: {:recovered, updated_media_item, message}
+ # - On error: {:error, error_atom, message} where error_atom is one of:
+ # - `:unsuitable_for_download` if the media item fails the precheck
+ # - `:unrecoverable` if there was an initial failure and the recovery attempt failed
+ # - `:download_failed` for all other yt-dlp-related downloading errors
+ # - `:unknown` for any other errors, including those not related to yt-dlp
+ # - If we retry using cookies, all of the above return values apply. The cookie retry
+ # logic is handled transparently as far as the caller is concerned
+ defp attempt_download_and_update_for_media_item(media_item, override_opts) do
output_filepath = FilesystemUtils.generate_metadata_tmpfile(:json)
media_with_preloads = Repo.preload(media_item, [:metadata, source: :media_profile])
@@ -37,25 +80,31 @@ defmodule Pinchflat.Downloading.MediaDownloader do
{:ok, parsed_json} ->
update_media_item_from_parsed_json(media_with_preloads, parsed_json)
+ {:error, :unsuitable_for_download} ->
+ message =
+ "Media item ##{media_with_preloads.id} isn't suitable for download yet. May be an active or processing live stream"
+
+ Logger.warning(message)
+
+ {:error, :unsuitable_for_download, message}
+
{:error, message, _exit_code} ->
Logger.error("yt-dlp download error for media item ##{media_with_preloads.id}: #{inspect(message)}")
if String.contains?(to_string(message), recoverable_errors()) do
- attempt_update_media_item(media_with_preloads, output_filepath)
-
- {:recovered, message}
+ attempt_recovery_from_error(media_with_preloads, output_filepath, message)
else
- {:error, message}
+ {:error, :download_failed, message}
end
err ->
Logger.error("Unknown error downloading media item ##{media_with_preloads.id}: #{inspect(err)}")
- {:error, "Unknown error: #{inspect(err)}"}
+ {:error, :unknown, "Unknown error: #{inspect(err)}"}
end
end
- defp attempt_update_media_item(media_with_preloads, output_filepath) do
+ defp attempt_recovery_from_error(media_with_preloads, output_filepath, error_message) do
with {:ok, contents} <- File.read(output_filepath),
{:ok, parsed_json} <- Phoenix.json_library().decode(contents) do
Logger.info("""
@@ -64,12 +113,13 @@ defmodule Pinchflat.Downloading.MediaDownloader do
anyway
""")
- update_media_item_from_parsed_json(media_with_preloads, parsed_json)
+ {:ok, updated_media_item} = update_media_item_from_parsed_json(media_with_preloads, parsed_json)
+ {:recovered, updated_media_item, error_message}
else
err ->
Logger.error("Unable to recover error for media item ##{media_with_preloads.id}: #{inspect(err)}")
- {:error, :retry_failed}
+ {:error, :unrecoverable, error_message}
end
end
@@ -79,6 +129,7 @@ defmodule Pinchflat.Downloading.MediaDownloader do
|> MetadataParser.parse_for_media_item()
|> Map.merge(%{
media_downloaded_at: DateTime.utc_now(),
+ culled_at: nil,
nfo_filepath: determine_nfo_filepath(media_with_preloads, parsed_json),
metadata: %{
# IDEA: might be worth kicking off a job for this since thumbnail fetching
@@ -105,8 +156,49 @@ defmodule Pinchflat.Downloading.MediaDownloader do
defp download_with_options(url, item_with_preloads, output_filepath, override_opts) do
{:ok, options} = DownloadOptionBuilder.build(item_with_preloads, override_opts)
+ force_use_cookies = Keyword.get(override_opts, :force_use_cookies, false)
+ source_uses_cookies = Sources.use_cookies?(item_with_preloads.source, :downloading)
+ should_use_cookies = force_use_cookies || source_uses_cookies
- YtDlpMedia.download(url, options, output_filepath: output_filepath)
+ runner_opts = [output_filepath: output_filepath, use_cookies: should_use_cookies]
+
+ case {YtDlpMedia.get_downloadable_status(url, use_cookies: should_use_cookies), should_use_cookies} do
+ {{:ok, :downloadable}, _} ->
+ YtDlpMedia.download(url, options, runner_opts)
+
+ {{:ok, :ignorable}, _} ->
+ {:error, :unsuitable_for_download}
+
+ {{:error, _message, _exit_code} = err, false} ->
+ # If there was an error and we don't have cookies, this method will retry with cookies
+ # if doing so would help AND the source allows. Otherwise, it will return the error as-is
+ maybe_retry_with_cookies(url, item_with_preloads, output_filepath, override_opts, err)
+
+ # This gets hit if cookies are enabled which, importantly, also covers the case where we
+ # retry a download with cookies and it fails again
+ {{:error, message, exit_code}, true} ->
+ {:error, message, exit_code}
+
+ {err, _} ->
+ err
+ end
+ end
+
+ defp maybe_retry_with_cookies(url, item_with_preloads, output_filepath, override_opts, err) do
+ {:error, message, _} = err
+ source = item_with_preloads.source
+ message_contains_cookie_error = String.contains?(to_string(message), recoverable_cookie_errors())
+
+ if Sources.use_cookies?(source, :error_recovery) && message_contains_cookie_error do
+ download_with_options(
+ url,
+ item_with_preloads,
+ output_filepath,
+ Keyword.put(override_opts, :force_use_cookies, true)
+ )
+ else
+ err
+ end
end
defp recoverable_errors do
@@ -114,4 +206,11 @@ defmodule Pinchflat.Downloading.MediaDownloader do
"Unable to communicate with SponsorBlock"
]
end
+
+ defp recoverable_cookie_errors do
+ [
+ "Sign in to confirm",
+ "This video is available to this channel's members"
+ ]
+ end
end
diff --git a/lib/pinchflat/downloading/media_quality_upgrade_worker.ex b/lib/pinchflat/downloading/media_quality_upgrade_worker.ex
index 4f554fa..8fcfb22 100644
--- a/lib/pinchflat/downloading/media_quality_upgrade_worker.ex
+++ b/lib/pinchflat/downloading/media_quality_upgrade_worker.ex
@@ -23,10 +23,10 @@ defmodule Pinchflat.Downloading.MediaQualityUpgradeWorker do
"""
@impl Oban.Worker
def perform(%Oban.Job{}) do
- redownloadable_media = Media.list_redownloadable_media_items()
- Logger.info("Redownloading #{length(redownloadable_media)} media items")
+ upgradable_media = Media.list_upgradeable_media_items()
+ Logger.info("Redownloading #{length(upgradable_media)} media items")
- Enum.each(redownloadable_media, fn media_item ->
+ Enum.each(upgradable_media, fn media_item ->
MediaDownloadWorker.kickoff_with_task(media_item, %{quality_upgrade?: true})
end)
end
diff --git a/lib/pinchflat/downloading/media_retention_worker.ex b/lib/pinchflat/downloading/media_retention_worker.ex
index 3e5c0d0..461ac1c 100644
--- a/lib/pinchflat/downloading/media_retention_worker.ex
+++ b/lib/pinchflat/downloading/media_retention_worker.ex
@@ -2,12 +2,15 @@ defmodule Pinchflat.Downloading.MediaRetentionWorker do
@moduledoc false
use Oban.Worker,
- queue: :local_metadata,
+ queue: :local_data,
unique: [period: :infinity, states: [:available, :scheduled, :retryable, :executing]],
- tags: ["media_item", "local_metadata"]
+ tags: ["media_item", "local_data"]
+
+ use Pinchflat.Media.MediaQuery
require Logger
+ alias Pinchflat.Repo
alias Pinchflat.Media
@doc """
@@ -20,14 +23,53 @@ defmodule Pinchflat.Downloading.MediaRetentionWorker do
"""
@impl Oban.Worker
def perform(%Oban.Job{}) do
- cullable_media = Media.list_cullable_media_items()
+ cull_cullable_media_items()
+ delete_media_items_from_before_cutoff()
+
+ :ok
+ end
+
+ defp cull_cullable_media_items do
+ cullable_media =
+ MediaQuery.new()
+ |> MediaQuery.require_assoc(:source)
+ |> where(^MediaQuery.cullable())
+ |> Repo.all()
+
Logger.info("Culling #{length(cullable_media)} media items past their retention date")
Enum.each(cullable_media, fn media_item ->
+ # Setting `prevent_download` does what it says on the tin, but `culled_at` is purely informational.
+ # We don't actually do anything with that in terms of queries and it gets set to nil if the media item
+ # gets re-downloaded.
Media.delete_media_files(media_item, %{
prevent_download: true,
culled_at: DateTime.utc_now()
})
end)
end
+
+ # NOTE: Since this is a date and not a datetime, we can't add logic to have to-the-minute
+ # comparison like we can with retention periods. We can only compare to the day.
+ defp delete_media_items_from_before_cutoff do
+ deletable_media =
+ MediaQuery.new()
+ |> MediaQuery.require_assoc(:source)
+ |> where(^MediaQuery.deletable_based_on_source_cutoff())
+ |> Repo.all()
+
+ Logger.info("Deleting #{length(deletable_media)} media items that are from before the source cutoff")
+
+ Enum.each(deletable_media, fn media_item ->
+ # Note that I'm not setting `prevent_download` on the media_item here.
+ # That's because cutoff_date can easily change and it's a valid behavior to re-download older
+ # media items if the cutoff_date changes.
+ # Download is ultimately prevented because `MediaQuery.pending()` only returns media items
+ # from after the cutoff date (among other things), so it's not like the media will just immediately
+ # be re-downloaded.
+ Media.delete_media_files(media_item, %{
+ culled_at: DateTime.utc_now()
+ })
+ end)
+ end
end
diff --git a/lib/pinchflat/downloading/output_path_builder.ex b/lib/pinchflat/downloading/output_path_builder.ex
index d4f6e27..1c1392f 100644
--- a/lib/pinchflat/downloading/output_path_builder.ex
+++ b/lib/pinchflat/downloading/output_path_builder.ex
@@ -54,7 +54,12 @@ defmodule Pinchflat.Downloading.OutputPathBuilder do
"season_from_date" => "%(upload_date>%Y)S",
"season_episode_from_date" => "s%(upload_date>%Y)Se%(upload_date>%m%d)S",
"season_episode_index_from_date" => "s%(upload_date>%Y)Se%(upload_date>%m%d)S{{ media_upload_date_index }}",
- "artist_name" => "%(artist,creator,uploader,uploader_id)S"
+ "artist_name" => "%(artist,creator,uploader,uploader_id)S",
+ "static_season__episode_by_index" => "Season 1/s01e{{ media_playlist_index }}",
+ "static_season__episode_by_date" => "Season 1/s01e%(upload_date>%y%m%d)S",
+ "season_by_year__episode_by_date" => "Season %(upload_date>%Y)S/s%(upload_date>%Y)Se%(upload_date>%m%d)S",
+ "season_by_year__episode_by_date_and_index" =>
+ "Season %(upload_date>%Y)S/s%(upload_date>%Y)Se%(upload_date>%m%d)S{{ media_upload_date_index }}"
}
end
end
diff --git a/lib/pinchflat/downloading/quality_option_builder.ex b/lib/pinchflat/downloading/quality_option_builder.ex
new file mode 100644
index 0000000..cb89435
--- /dev/null
+++ b/lib/pinchflat/downloading/quality_option_builder.ex
@@ -0,0 +1,66 @@
+defmodule Pinchflat.Downloading.QualityOptionBuilder do
+ @moduledoc """
+ A standalone builder module for building quality-related options for yt-dlp to download media.
+
+ Currently exclusively used in DownloadOptionBuilder since this logic is too complex to just
+ place in the main module.
+ """
+
+ alias Pinchflat.Settings
+ alias Pinchflat.Profiles.MediaProfile
+
+ @doc """
+ Builds the quality-related options for yt-dlp to download media based on the given media profile
+
+ Includes things like container, preferred format/codec, and audio track options.
+ """
+ def build(%MediaProfile{preferred_resolution: :audio, media_container: container} = media_profile) do
+ acodec = Settings.get!(:audio_codec_preference)
+
+ [
+ :extract_audio,
+ format_sort: "+acodec:#{acodec}",
+ audio_format: container || "best",
+ format: build_format_string(media_profile)
+ ]
+ end
+
+ def build(%MediaProfile{preferred_resolution: resolution_atom, media_container: container} = media_profile) do
+ vcodec = Settings.get!(:video_codec_preference)
+ acodec = Settings.get!(:audio_codec_preference)
+ {resolution_string, _} = resolution_atom |> Atom.to_string() |> Integer.parse()
+
+ [
+ # Since Plex doesn't support reading metadata from MKV
+ remux_video: container || "mp4",
+ format_sort: "res:#{resolution_string},+codec:#{vcodec}:#{acodec}",
+ format: build_format_string(media_profile)
+ ]
+ end
+
+ defp build_format_string(%MediaProfile{preferred_resolution: :audio, audio_track: audio_track}) do
+ if audio_track do
+ "bestaudio[#{build_format_modifier(audio_track)}]/bestaudio/best"
+ else
+ "bestaudio/best"
+ end
+ end
+
+ defp build_format_string(%MediaProfile{audio_track: audio_track}) do
+ if audio_track do
+ "bestvideo+bestaudio[#{build_format_modifier(audio_track)}]/bestvideo*+bestaudio/best"
+ else
+ "bestvideo*+bestaudio/best"
+ end
+ end
+
+ # Reminder to self: this conflicts with `--extractor-args "youtube:lang="`
+ # since that will translate the format_notes as well, which means they may not match.
+ # At least that's what happens now - worth a re-check if I have to come back to this
+ defp build_format_modifier("original"), do: "format_note*=original"
+ defp build_format_modifier("default"), do: "format_note*='(default)'"
+ # This uses the caret to anchor the language to the beginning of the string
+ # since that's what's needed to match `en` to `en-US` and `en-GB`, etc. The user
+ # can always specify the full language code if they want.
+ defp build_format_modifier(language_code), do: "language^=#{language_code}"
+end
diff --git a/lib/pinchflat/fast_indexing/fast_indexing_helpers.ex b/lib/pinchflat/fast_indexing/fast_indexing_helpers.ex
index 02fc00f..15a4342 100644
--- a/lib/pinchflat/fast_indexing/fast_indexing_helpers.ex
+++ b/lib/pinchflat/fast_indexing/fast_indexing_helpers.ex
@@ -11,13 +11,28 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpers do
alias Pinchflat.Repo
alias Pinchflat.Media
+ alias Pinchflat.Tasks
+ alias Pinchflat.Sources
alias Pinchflat.Sources.Source
alias Pinchflat.FastIndexing.YoutubeRss
alias Pinchflat.FastIndexing.YoutubeApi
alias Pinchflat.Downloading.DownloadingHelpers
+ alias Pinchflat.FastIndexing.FastIndexingWorker
+ alias Pinchflat.Downloading.DownloadOptionBuilder
alias Pinchflat.YtDlp.Media, as: YtDlpMedia
+ @doc """
+ Kicks off a new fast indexing task for a source. This will delete any existing fast indexing
+ tasks for the source before starting a new one.
+
+ Returns {:ok, %Task{}}
+ """
+ def kickoff_indexing_task(%Source{} = source) do
+ Tasks.delete_pending_tasks_for(source, "FastIndexingWorker", include_executing: true)
+ FastIndexingWorker.kickoff_with_task(source)
+ end
+
@doc """
Fetches new media IDs for a source from YT's API or RSS, indexes them, and kicks off downloading
tasks for any pending media items. See comments in `FastIndexingWorker` for more info on the
@@ -26,7 +41,11 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpers do
Returns [%MediaItem{}] where each item is a new media item that was created _but not necessarily
downloaded_.
"""
- def kickoff_download_tasks_from_youtube_rss_feed(%Source{} = source) do
+ def index_and_kickoff_downloads(%Source{} = source) do
+ # The media_profile is needed to determine the quality options to _then_ determine a more
+ # accurate predicted filepath
+ source = Repo.preload(source, [:media_profile])
+
{:ok, media_ids} = get_recent_media_ids(source)
existing_media_items = list_media_items_by_media_id_for(source, media_ids)
new_media_ids = media_ids -- Enum.map(existing_media_items, & &1.media_id)
@@ -35,6 +54,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpers do
Enum.map(new_media_ids, fn media_id ->
case create_media_item_from_media_id(source, media_id) do
{:ok, media_item} ->
+ DownloadingHelpers.kickoff_download_if_pending(media_item, priority: 0)
media_item
err ->
@@ -43,7 +63,9 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpers do
end
end)
- DownloadingHelpers.enqueue_pending_download_tasks(source)
+ # Pick up any stragglers. Intentionally has a lower priority than the per-media item
+ # kickoff above
+ DownloadingHelpers.enqueue_pending_download_tasks(source, priority: 1)
Enum.filter(maybe_new_media_items, & &1)
end
@@ -67,8 +89,16 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpers do
defp create_media_item_from_media_id(source, media_id) do
url = "https://www.youtube.com/watch?v=#{media_id}"
+ # This is set to :metadata instead of :indexing since this happens _after_ the
+ # actual indexing process. In reality, slow indexing is the only thing that
+ # should be using :indexing.
+ should_use_cookies = Sources.use_cookies?(source, :metadata)
- case YtDlpMedia.get_media_attributes(url) do
+ command_opts =
+ [output: DownloadOptionBuilder.build_output_path_for(source)] ++
+ DownloadOptionBuilder.build_quality_options_for(source)
+
+ case YtDlpMedia.get_media_attributes(url, command_opts, use_cookies: should_use_cookies) do
{:ok, media_attrs} ->
Media.create_media_item_from_backend_attrs(source, media_attrs)
diff --git a/lib/pinchflat/fast_indexing/fast_indexing_worker.ex b/lib/pinchflat/fast_indexing/fast_indexing_worker.ex
index 368da17..ed83bf3 100644
--- a/lib/pinchflat/fast_indexing/fast_indexing_worker.ex
+++ b/lib/pinchflat/fast_indexing/fast_indexing_worker.ex
@@ -38,8 +38,8 @@ defmodule Pinchflat.FastIndexing.FastIndexingWorker do
Order of operations:
1. FastIndexingWorker (this module) periodically checks the YouTube RSS feed for new media.
- with `FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed`
- 2. If the above `kickoff_download_tasks_from_youtube_rss_feed` finds new media items in the RSS feed,
+ with `FastIndexingHelpers.index_and_kickoff_downloads`
+ 2. If the above `index_and_kickoff_downloads` finds new media items in the RSS feed,
it indexes them with a yt-dlp call to create the media item records then kicks off downloading
tasks (MediaDownloadWorker) for any new media items _that should be downloaded_.
3. Once downloads are kicked off, this worker sends a notification to the apprise server if applicable
@@ -67,7 +67,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingWorker do
new_media_items =
source
- |> FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed()
+ |> FastIndexingHelpers.index_and_kickoff_downloads()
|> Enum.filter(&Media.pending_download?(&1))
if source.download_media do
diff --git a/lib/pinchflat/fast_indexing/youtube_api.ex b/lib/pinchflat/fast_indexing/youtube_api.ex
index 95e6b39..04c7326 100644
--- a/lib/pinchflat/fast_indexing/youtube_api.ex
+++ b/lib/pinchflat/fast_indexing/youtube_api.ex
@@ -12,6 +12,8 @@ defmodule Pinchflat.FastIndexing.YoutubeApi do
@behaviour YoutubeBehaviour
+ @agent_name {:global, __MODULE__.KeyIndex}
+
@doc """
Determines if the YouTube API is enabled for fast indexing by checking
if the user has an API key set
@@ -19,7 +21,7 @@ defmodule Pinchflat.FastIndexing.YoutubeApi do
Returns boolean()
"""
@impl YoutubeBehaviour
- def enabled?(), do: is_binary(api_key())
+ def enabled?, do: Enum.any?(api_keys())
@doc """
Fetches the recent media IDs from the YouTube API for a given source.
@@ -74,8 +76,45 @@ defmodule Pinchflat.FastIndexing.YoutubeApi do
|> FunctionUtils.wrap_ok()
end
- defp api_key do
- Settings.get!(:youtube_api_key)
+ defp api_keys do
+ case Settings.get!(:youtube_api_key) do
+ nil ->
+ []
+
+ keys ->
+ keys
+ |> String.split(",")
+ |> Enum.map(&String.trim/1)
+ |> Enum.reject(&(&1 == ""))
+ end
+ end
+
+ defp get_or_start_api_key_agent do
+ case Agent.start(fn -> 0 end, name: @agent_name) do
+ {:ok, pid} -> pid
+ {:error, {:already_started, pid}} -> pid
+ end
+ end
+
+ # Gets the next API key in round-robin fashion
+ defp next_api_key do
+ keys = api_keys()
+
+ case keys do
+ [] ->
+ nil
+
+ keys ->
+ pid = get_or_start_api_key_agent()
+
+ current_index =
+ Agent.get_and_update(pid, fn current ->
+ {current, rem(current + 1, length(keys))}
+ end)
+
+ Logger.debug("Using YouTube API key: #{Enum.at(keys, current_index)}")
+ Enum.at(keys, current_index)
+ end
end
defp construct_api_endpoint(playlist_id) do
@@ -83,7 +122,7 @@ defmodule Pinchflat.FastIndexing.YoutubeApi do
property_type = "contentDetails"
max_results = 50
- "#{api_base}?part=#{property_type}&maxResults=#{max_results}&playlistId=#{playlist_id}&key=#{api_key()}"
+ "#{api_base}?part=#{property_type}&maxResults=#{max_results}&playlistId=#{playlist_id}&key=#{next_api_key()}"
end
defp http_client do
diff --git a/lib/pinchflat/lifecycle/user_scripts/command_runner.ex b/lib/pinchflat/lifecycle/user_scripts/command_runner.ex
index 8436c05..9a77ea5 100644
--- a/lib/pinchflat/lifecycle/user_scripts/command_runner.ex
+++ b/lib/pinchflat/lifecycle/user_scripts/command_runner.ex
@@ -12,6 +12,8 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunner do
@behaviour UserScriptCommandRunner
@event_types [
+ :app_init,
+ :media_pre_download,
:media_downloaded,
:media_deleted
]
@@ -22,24 +24,25 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunner do
This function will succeed in almost all cases, even if the user script command
failed - this is because I don't want bad scripts to stop the whole process.
- If something fails, it'll be logged.
+ If something fails, it'll be logged and returned BUT the tuple will always
+ start with {:ok, ...}.
The only things that can cause a true failure are passing in an invalid event
type or if the passed data cannot be encoded into JSON - both indicative of
failures in the development process.
- Returns :ok
+ Returns {:ok, :no_executable} | {:ok, output, exit_code}
"""
@impl UserScriptCommandRunner
def run(event_type, encodable_data) when event_type in @event_types do
case backend_executable() do
{:ok, :no_executable} ->
- :ok
+ {:ok, :no_executable}
{:ok, executable_path} ->
{:ok, encoded_data} = Phoenix.json_library().encode(encodable_data)
- {_output, _exit_code} =
+ {output, exit_code} =
CliUtils.wrap_cmd(
executable_path,
[to_string(event_type), encoded_data],
@@ -47,7 +50,7 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunner do
logging_arg_override: "[suppressed]"
)
- :ok
+ {:ok, output, exit_code}
end
end
@@ -62,7 +65,7 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunner do
if FilesystemUtils.exists_and_nonempty?(filepath) do
{:ok, filepath}
else
- Logger.warning("User scripts lifecyle file either not present or is empty. Skipping.")
+ Logger.info("User scripts lifecyle file either not present or is empty. Skipping.")
{:ok, :no_executable}
end
diff --git a/lib/pinchflat/media/file_syncing.ex b/lib/pinchflat/media/file_syncing.ex
new file mode 100644
index 0000000..7fb5b65
--- /dev/null
+++ b/lib/pinchflat/media/file_syncing.ex
@@ -0,0 +1,93 @@
+defmodule Pinchflat.Media.FileSyncing do
+ @moduledoc """
+ Functions for ensuring file state is accurately reflected in the database.
+ """
+
+ alias Pinchflat.Media
+ alias Pinchflat.Utils.MapUtils
+ alias Pinchflat.Media.MediaItem
+ alias Pinchflat.Utils.FilesystemUtils, as: FSUtils
+
+ @doc """
+ Deletes files that are no longer needed by a media item.
+
+ This means that if a media item has been updated, the old and new versions
+ can be passed and any files that are no longer needed will be deleted.
+
+ An example is a video that gets its quality upgraded and its name changes
+ between original download and re-download. The old file will exist on-disk
+ with the old name but the database entry will point to the new file. This
+ function can be used to delete the old file in this case.
+
+ Returns :ok
+ """
+ def delete_outdated_files(old_media_item, new_media_item) do
+ non_subtitle_keys = MediaItem.filepath_attributes() -- [:subtitle_filepaths]
+
+ old_non_subtitles = Map.take(old_media_item, non_subtitle_keys)
+ old_subtitles = MapUtils.from_nested_list(old_media_item.subtitle_filepaths)
+ new_non_subtitles = Map.take(new_media_item, non_subtitle_keys)
+ new_subtitles = MapUtils.from_nested_list(new_media_item.subtitle_filepaths)
+
+ handle_file_deletion(old_non_subtitles, new_non_subtitles)
+ handle_file_deletion(old_subtitles, new_subtitles)
+
+ :ok
+ end
+
+ @doc """
+ Nilifies any media item filepaths that don't exist on disk for a list of media items
+
+ returns [%MediaItem{}]
+ """
+ def sync_file_presence_on_disk(media_items) do
+ Enum.map(media_items, fn media_item ->
+ new_attributes = sync_media_item_files(media_item)
+ # Doing this one-by-one instead of batching since this process
+ # can take time and a batch could let MediaItem state get out of sync
+ {:ok, updated_media_item} = Media.update_media_item(media_item, new_attributes)
+
+ updated_media_item
+ end)
+ end
+
+ defp handle_file_deletion(old_attributes, new_attributes) do
+ # The logic:
+ # - A file should only be deleted if it exists and the new file is different
+ # - The new attributes are the ones we're interested in keeping
+ # - If the old attributes have a key that doesn't exist in the new attributes, don't touch it.
+ # This is good for archiving but may be unpopular for other users so this may change.
+
+ Enum.each(new_attributes, fn {key, new_filepath} ->
+ old_filepath = Map.get(old_attributes, key)
+ files_have_changed = old_filepath && new_filepath && old_filepath != new_filepath
+ files_exist_on_disk = files_have_changed && File.exists?(old_filepath) && File.exists?(new_filepath)
+
+ if files_exist_on_disk && !FSUtils.filepaths_reference_same_file?(old_filepath, new_filepath) do
+ FSUtils.delete_file_and_remove_empty_directories(old_filepath)
+ end
+ end)
+ end
+
+ defp sync_media_item_files(media_item) do
+ non_subtitle_keys = MediaItem.filepath_attributes() -- [:subtitle_filepaths]
+ subtitle_keys = MapUtils.from_nested_list(media_item.subtitle_filepaths)
+ non_subtitles = Map.take(media_item, non_subtitle_keys)
+
+ # This one is checking for the negative (ie: only update if the file doesn't exist)
+ new_non_subtitle_attrs =
+ Enum.reduce(non_subtitles, %{}, fn {key, filepath}, acc ->
+ if filepath && File.exists?(filepath), do: acc, else: Map.put(acc, key, nil)
+ end)
+
+ # This one is checking for the positive (ie: only update if the file exists)
+ # This is because subtitles, being an array type in the DB, are most easily updated
+ # by a full replacement rather than finding the actual diff
+ new_subtitle_attrs =
+ Enum.reduce(subtitle_keys, [], fn {key, filepath}, acc ->
+ if filepath && File.exists?(filepath), do: acc ++ [[key, filepath]], else: acc
+ end)
+
+ Map.put(new_non_subtitle_attrs, :subtitle_filepaths, new_subtitle_attrs)
+ end
+end
diff --git a/lib/pinchflat/media/file_syncing_worker.ex b/lib/pinchflat/media/file_syncing_worker.ex
new file mode 100644
index 0000000..fde7e73
--- /dev/null
+++ b/lib/pinchflat/media/file_syncing_worker.ex
@@ -0,0 +1,38 @@
+defmodule Pinchflat.Media.FileSyncingWorker do
+ @moduledoc false
+
+ use Oban.Worker,
+ queue: :local_data,
+ tags: ["sources", "local_data"]
+
+ alias __MODULE__
+ alias Pinchflat.Repo
+ alias Pinchflat.Tasks
+ alias Pinchflat.Sources
+ alias Pinchflat.Media.FileSyncing
+
+ @doc """
+ Starts the source file syncing worker.
+
+ Returns {:ok, %Task{}} | {:error, %Ecto.Changeset{}}
+ """
+ def kickoff_with_task(source, opts \\ []) do
+ %{id: source.id}
+ |> FileSyncingWorker.new(opts)
+ |> Tasks.create_job_with_task(source)
+ end
+
+ @doc """
+ Syncs the on-disk file presence for all of a source's media items
+
+ Returns :ok
+ """
+ @impl Oban.Worker
+ def perform(%Oban.Job{args: %{"id" => source_id}}) do
+ source = Repo.preload(Sources.get_source!(source_id), :media_items)
+
+ FileSyncing.sync_file_presence_on_disk(source.media_items)
+
+ :ok
+ end
+end
diff --git a/lib/pinchflat/media/media.ex b/lib/pinchflat/media/media.ex
index 191b28f..e1d8d7d 100644
--- a/lib/pinchflat/media/media.ex
+++ b/lib/pinchflat/media/media.ex
@@ -15,6 +15,9 @@ defmodule Pinchflat.Media do
alias Pinchflat.Lifecycle.UserScripts.CommandRunner, as: UserScriptRunner
+ # Some fields should only be set on insert and not on update.
+ @fields_to_drop_on_update [:playlist_index]
+
@doc """
Returns the list of media_items.
@@ -25,21 +28,10 @@ defmodule Pinchflat.Media do
end
@doc """
- Returns a list of media_items that are cullable based on the retention period
- of the source they belong to.
-
- Returns [%MediaItem{}, ...]
- """
- def list_cullable_media_items do
- MediaQuery.new()
- |> MediaQuery.require_assoc(:source)
- |> where(^MediaQuery.cullable())
- |> Repo.all()
- end
-
- @doc """
- Returns a list of media_items that are redownloadable based on the redownload delay
- of the media_profile their source belongs to.
+ Returns a list of media_items that are upgradeable based on the redownload delay
+ of the media_profile their source belongs to. In this context, upgradeable means
+ that it's been long enough since upload that the video may be in a higher quality
+ or have better sponsorblock segments (or similar).
The logic is that a media_item is past_redownload_delay if the media_item's uploaded_at is
at least redownload_delay_days ago AND `media_downloaded_at` - `redownload_delay_days`
@@ -52,10 +44,10 @@ defmodule Pinchflat.Media do
Returns [%MediaItem{}, ...]
"""
- def list_redownloadable_media_items do
+ def list_upgradeable_media_items do
MediaQuery.new()
|> MediaQuery.require_assoc(:media_profile)
- |> where(^MediaQuery.redownloadable())
+ |> where(^MediaQuery.upgradeable())
|> Repo.all()
end
@@ -147,7 +139,10 @@ defmodule Pinchflat.Media do
|> MediaItem.changeset(attrs)
|> Repo.insert(
on_conflict: [
- set: Map.to_list(attrs)
+ set:
+ attrs
+ |> Map.drop(@fields_to_drop_on_update)
+ |> Map.to_list()
],
conflict_target: [:source_id, :media_id]
)
@@ -159,8 +154,10 @@ defmodule Pinchflat.Media do
Returns {:ok, %MediaItem{}} | {:error, %Ecto.Changeset{}}
"""
def update_media_item(%MediaItem{} = media_item, attrs) do
+ update_attrs = Map.drop(attrs, @fields_to_drop_on_update)
+
media_item
- |> MediaItem.changeset(attrs)
+ |> MediaItem.changeset(update_attrs)
|> Repo.update()
end
@@ -177,7 +174,7 @@ defmodule Pinchflat.Media do
if delete_files do
{:ok, _} = do_delete_media_files(media_item)
- :ok = run_user_script(:media_deleted, media_item)
+ run_user_script(:media_deleted, media_item)
end
# Should delete these no matter what
@@ -200,7 +197,7 @@ defmodule Pinchflat.Media do
Tasks.delete_tasks_for(media_item)
{:ok, _} = do_delete_media_files(media_item)
- :ok = run_user_script(:media_deleted, media_item)
+ run_user_script(:media_deleted, media_item)
update_media_item(media_item, Map.merge(filepath_attrs, addl_attrs))
end
diff --git a/lib/pinchflat/media/media_item.ex b/lib/pinchflat/media/media_item.ex
index 99a53c0..ced8e8c 100644
--- a/lib/pinchflat/media/media_item.ex
+++ b/lib/pinchflat/media/media_item.ex
@@ -18,6 +18,8 @@ defmodule Pinchflat.Media.MediaItem do
alias Pinchflat.Media.MediaItemsSearchIndex
@allowed_fields [
+ # these fields are only captured on index
+ :playlist_index,
# these fields are captured on indexing (and again on download)
:title,
:media_id,
@@ -29,6 +31,7 @@ defmodule Pinchflat.Media.MediaItem do
:uploaded_at,
:upload_date_index,
:duration_seconds,
+ :predicted_media_filepath,
# these fields are captured only on download
:media_downloaded_at,
:media_filepath,
@@ -37,6 +40,7 @@ defmodule Pinchflat.Media.MediaItem do
:thumbnail_filepath,
:metadata_filepath,
:nfo_filepath,
+ :last_error,
# These are user or system controlled fields
:prevent_download,
:prevent_culling,
@@ -72,7 +76,9 @@ defmodule Pinchflat.Media.MediaItem do
field :uploaded_at, :utc_datetime
field :upload_date_index, :integer, default: 0
field :duration_seconds, :integer
+ field :playlist_index, :integer, default: 0
+ field :predicted_media_filepath, :string
field :media_filepath, :string
field :media_size_bytes, :integer
field :thumbnail_filepath, :string
@@ -83,6 +89,7 @@ defmodule Pinchflat.Media.MediaItem do
# Will very likely revisit because I can't leave well-enough alone.
field :subtitle_filepaths, {:array, {:array, :string}}, default: []
+ field :last_error, :string
field :prevent_download, :boolean, default: false
field :prevent_culling, :boolean, default: false
field :culled_at, :utc_datetime
@@ -107,6 +114,9 @@ defmodule Pinchflat.Media.MediaItem do
|> dynamic_default(:uuid, fn _ -> Ecto.UUID.generate() end)
|> update_upload_date_index()
|> validate_required(@required_fields)
+ # Validate that the title does NOT start with "youtube video #" since that indicates a restriction by YouTube.
+ # See issue #549 for more information.
+ |> validate_format(:title, ~r/^(?!youtube video #)/)
|> unique_constraint([:media_id, :source_id])
end
diff --git a/lib/pinchflat/media/media_query.ex b/lib/pinchflat/media/media_query.ex
index 2adb0ce..840e82c 100644
--- a/lib/pinchflat/media/media_query.ex
+++ b/lib/pinchflat/media/media_query.ex
@@ -33,7 +33,6 @@ defmodule Pinchflat.Media.MediaQuery do
def downloaded, do: dynamic([mi], not is_nil(mi.media_filepath))
def download_prevented, do: dynamic([mi], mi.prevent_download == true)
def culling_prevented, do: dynamic([mi], mi.prevent_culling == true)
- def culled, do: dynamic([mi], not is_nil(mi.culled_at))
def redownloaded, do: dynamic([mi], not is_nil(mi.media_redownloaded_at))
def upload_date_matches(other_date), do: dynamic([mi], fragment("date(?) = date(?)", mi.uploaded_at, ^other_date))
@@ -76,12 +75,20 @@ defmodule Pinchflat.Media.MediaQuery do
)
end
+ def meets_min_and_max_duration do
+ dynamic(
+ [mi, source],
+ (is_nil(source.min_duration_seconds) or fragment("duration_seconds >= ?", source.min_duration_seconds)) and
+ (is_nil(source.max_duration_seconds) or fragment("duration_seconds <= ?", source.max_duration_seconds))
+ )
+ end
+
def past_retention_period do
dynamic(
[mi, source],
fragment("""
IFNULL(retention_period_days, 0) > 0 AND
- DATETIME('now', '-' || retention_period_days || ' day') > media_downloaded_at
+ DATETIME(media_downloaded_at, '+' || retention_period_days || ' day') < DATETIME('now')
""")
)
end
@@ -93,8 +100,8 @@ defmodule Pinchflat.Media.MediaQuery do
# downloaded_at minus the redownload_delay_days is before the upload date
fragment("""
IFNULL(redownload_delay_days, 0) > 0 AND
- DATETIME('now', '-' || redownload_delay_days || ' day') > uploaded_at AND
- DATETIME(media_downloaded_at, '-' || redownload_delay_days || ' day') < uploaded_at
+ DATE('now', '-' || redownload_delay_days || ' day') > DATE(uploaded_at) AND
+ DATE(media_downloaded_at, '-' || redownload_delay_days || ' day') < DATE(uploaded_at)
""")
)
end
@@ -108,6 +115,15 @@ defmodule Pinchflat.Media.MediaQuery do
)
end
+ def deletable_based_on_source_cutoff do
+ dynamic(
+ [mi, source],
+ ^downloaded() and
+ not (^upload_date_after_source_cutoff()) and
+ not (^culling_prevented())
+ )
+ end
+
def pending do
dynamic(
[mi],
@@ -115,16 +131,16 @@ defmodule Pinchflat.Media.MediaQuery do
not (^download_prevented()) and
^upload_date_after_source_cutoff() and
^format_matching_profile_preference() and
- ^matches_source_title_regex()
+ ^matches_source_title_regex() and
+ ^meets_min_and_max_duration()
)
end
- def redownloadable do
+ def upgradeable do
dynamic(
[mi, source],
^downloaded() and
not (^download_prevented()) and
- not (^culled()) and
not (^redownloaded()) and
^past_redownload_delay()
)
diff --git a/lib/pinchflat/metadata/metadata_file_helpers.ex b/lib/pinchflat/metadata/metadata_file_helpers.ex
index c4320fa..842f6ef 100644
--- a/lib/pinchflat/metadata/metadata_file_helpers.ex
+++ b/lib/pinchflat/metadata/metadata_file_helpers.ex
@@ -9,6 +9,7 @@ defmodule Pinchflat.Metadata.MetadataFileHelpers do
needed
"""
+ alias Pinchflat.Sources
alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.YtDlp.Media, as: YtDlpMedia
@@ -62,11 +63,13 @@ defmodule Pinchflat.Metadata.MetadataFileHelpers do
Returns binary() | nil
"""
- def download_and_store_thumbnail_for(database_record) do
- yt_dlp_filepath = generate_filepath_for(database_record, "thumbnail.%(ext)s")
- real_filepath = generate_filepath_for(database_record, "thumbnail.jpg")
+ def download_and_store_thumbnail_for(media_item_with_preloads) do
+ yt_dlp_filepath = generate_filepath_for(media_item_with_preloads, "thumbnail.%(ext)s")
+ real_filepath = generate_filepath_for(media_item_with_preloads, "thumbnail.jpg")
+ command_opts = [output: yt_dlp_filepath]
+ addl_opts = [use_cookies: Sources.use_cookies?(media_item_with_preloads.source, :metadata)]
- case YtDlpMedia.download_thumbnail(database_record.original_url, output: yt_dlp_filepath) do
+ case YtDlpMedia.download_thumbnail(media_item_with_preloads.original_url, command_opts, addl_opts) do
{:ok, _} -> real_filepath
_ -> nil
end
@@ -124,6 +127,21 @@ defmodule Pinchflat.Metadata.MetadataFileHelpers do
end
end
+ @doc """
+ Attempts to determine the season and episode number from a media filepath.
+
+ Returns {:ok, {binary(), binary()}} | {:error, :indeterminable}
+ """
+ def season_and_episode_from_media_filepath(media_filepath) do
+ # matches s + 1 or more digits + e + 1 or more digits (case-insensitive)
+ season_episode_regex = ~r/s(\d+)e(\d+)/i
+
+ case Regex.scan(season_episode_regex, media_filepath) do
+ [[_, season, episode] | _] -> {:ok, {season, episode}}
+ _ -> {:error, :indeterminable}
+ end
+ end
+
defp generate_filepath_for(database_record, filename) do
Path.join([
metadata_directory_for(database_record),
diff --git a/lib/pinchflat/metadata/nfo_builder.ex b/lib/pinchflat/metadata/nfo_builder.ex
index 3db237f..38a26dd 100644
--- a/lib/pinchflat/metadata/nfo_builder.ex
+++ b/lib/pinchflat/metadata/nfo_builder.ex
@@ -6,8 +6,8 @@ defmodule Pinchflat.Metadata.NfoBuilder do
import Pinchflat.Utils.XmlUtils, only: [safe: 1]
- alias Pinchflat.Metadata.MetadataFileHelpers
alias Pinchflat.Utils.FilesystemUtils
+ alias Pinchflat.Metadata.MetadataFileHelpers
@doc """
Builds an NFO file for a media item (read: single "episode") and
@@ -15,12 +15,12 @@ defmodule Pinchflat.Metadata.NfoBuilder do
Returns the filepath of the NFO file.
"""
- def build_and_store_for_media_item(filepath, metadata) do
- nfo = build_for_media_item(metadata)
+ def build_and_store_for_media_item(nfo_filepath, metadata) do
+ nfo = build_for_media_item(nfo_filepath, metadata)
- FilesystemUtils.write_p!(filepath, nfo)
+ FilesystemUtils.write_p!(nfo_filepath, nfo)
- filepath
+ nfo_filepath
end
@doc """
@@ -37,10 +37,15 @@ defmodule Pinchflat.Metadata.NfoBuilder do
filepath
end
- defp build_for_media_item(metadata) do
+ defp build_for_media_item(nfo_filepath, metadata) do
upload_date = MetadataFileHelpers.parse_upload_date(metadata["upload_date"])
+ # NOTE: the filepath here isn't the path of the media item, it's the path that
+ # the NFO should be saved to. This works because the NFO's path is the same as
+ # the media's path, just with a different extension. If this ever changes I'll
+ # need to pass in the media item's path as well.
+ {season, episode} = determine_season_and_episode_number(nfo_filepath, upload_date)
+
# Cribbed from a combination of the Kodi wiki, ytdl-nfo, and ytdl-sub.
- # WHO NEEDS A FANCY XML PARSER ANYWAY?!
"""
@@ -49,8 +54,8 @@ defmodule Pinchflat.Metadata.NfoBuilder do
#{safe(metadata["id"])}
#{safe(metadata["description"])}
#{safe(upload_date)}
- #{safe(upload_date.year)}
- #{Calendar.strftime(upload_date, "%m%d")}
+ #{safe(season)}
+ #{episode}
YouTube
"""
@@ -67,4 +72,11 @@ defmodule Pinchflat.Metadata.NfoBuilder do
"""
end
+
+ defp determine_season_and_episode_number(filepath, upload_date) do
+ case MetadataFileHelpers.season_and_episode_from_media_filepath(filepath) do
+ {:ok, {season, episode}} -> {season, episode}
+ {:error, _} -> {upload_date.year, Calendar.strftime(upload_date, "%m%d")}
+ end
+ end
end
diff --git a/lib/pinchflat/metadata/source_image_parser.ex b/lib/pinchflat/metadata/source_image_parser.ex
index 9a0370c..26bacd7 100644
--- a/lib/pinchflat/metadata/source_image_parser.ex
+++ b/lib/pinchflat/metadata/source_image_parser.ex
@@ -20,29 +20,49 @@ defmodule Pinchflat.Metadata.SourceImageParser do
def store_source_images(base_directory, source_metadata) do
(source_metadata["thumbnails"] || [])
|> Enum.filter(&(&1["filepath"] != nil))
- |> select_useful_images()
+ |> select_useful_images(source_metadata)
|> Enum.map(&move_image(&1, base_directory))
|> Enum.into(%{})
end
- defp select_useful_images(images) do
+ defp select_useful_images(images, source_metadata) do
labelled_images =
- Enum.reduce(images, [], fn image_map, acc ->
+ Enum.reduce(images, %{}, fn image_map, acc ->
case image_map do
- %{"id" => "avatar_uncropped"} ->
- acc ++ [{:poster, :poster_filepath, image_map["filepath"]}]
-
- %{"id" => "banner_uncropped"} ->
- acc ++ [{:fanart, :fanart_filepath, image_map["filepath"]}]
-
- _ ->
- acc
+ %{"id" => "avatar_uncropped"} -> put_image_key(acc, :poster, image_map["filepath"])
+ %{"id" => "banner_uncropped"} -> put_image_key(acc, :fanart, image_map["filepath"])
+ _ -> acc
end
end)
labelled_images
- |> Enum.concat([{:banner, :banner_filepath, determine_best_banner(images)}])
- |> Enum.filter(fn {_, _, tmp_filepath} -> tmp_filepath end)
+ |> add_fallback_poster(source_metadata)
+ |> put_image_key(:banner, determine_best_banner(images))
+ |> Enum.filter(fn {_key, attrs} -> attrs.current_filepath end)
+ end
+
+ # If a poster is set, short-circuit and return the images as-is
+ defp add_fallback_poster(%{poster: _} = images, _), do: images
+
+ # If a poster is NOT set, see if we can find a suitable image to use as a fallback
+ defp add_fallback_poster(images, source_metadata) do
+ case source_metadata["entries"] do
+ nil -> images
+ [] -> images
+ [first_entry | _] -> add_poster_from_entry_thumbnail(images, first_entry)
+ end
+ end
+
+ defp add_poster_from_entry_thumbnail(images, entry) do
+ thumbnail =
+ (entry["thumbnails"] || [])
+ |> Enum.reverse()
+ |> Enum.find(& &1["filepath"])
+
+ case thumbnail do
+ nil -> images
+ _ -> put_image_key(images, :poster, thumbnail["filepath"])
+ end
end
defp determine_best_banner(images) do
@@ -58,12 +78,22 @@ defmodule Pinchflat.Metadata.SourceImageParser do
Map.get(best_candidate || %{}, "filepath")
end
- defp move_image({filename, source_attr_name, tmp_filepath}, base_directory) do
- extension = Path.extname(tmp_filepath)
- final_filepath = Path.join([base_directory, "#{filename}#{extension}"])
+ defp move_image({_key, attrs}, base_directory) do
+ extension = Path.extname(attrs.current_filepath)
+ final_filepath = Path.join([base_directory, "#{attrs.final_filename}#{extension}"])
- FilesystemUtils.cp_p!(tmp_filepath, final_filepath)
+ FilesystemUtils.cp_p!(attrs.current_filepath, final_filepath)
- {source_attr_name, final_filepath}
+ {attrs.attribute_name, final_filepath}
+ end
+
+ defp put_image_key(map, key, image) do
+ attribute_atom = String.to_existing_atom("#{key}_filepath")
+
+ Map.put(map, key, %{
+ attribute_name: attribute_atom,
+ final_filename: to_string(key),
+ current_filepath: image
+ })
end
end
diff --git a/lib/pinchflat/metadata/source_metadata_storage_worker.ex b/lib/pinchflat/metadata/source_metadata_storage_worker.ex
index 52ec810..feb793b 100644
--- a/lib/pinchflat/metadata/source_metadata_storage_worker.ex
+++ b/lib/pinchflat/metadata/source_metadata_storage_worker.ex
@@ -77,10 +77,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorker do
defp fetch_source_metadata_and_images(series_directory, source) do
metadata_directory = MetadataFileHelpers.metadata_directory_for(source)
- tmp_output_path = "#{tmp_directory()}/#{StringUtils.random_string(16)}/source_image.%(ext)S"
- opts = [:write_all_thumbnails, convert_thumbnails: "jpg", output: tmp_output_path]
- {:ok, metadata} = MediaCollection.get_source_metadata(source.original_url, opts)
+ {:ok, metadata} = fetch_metadata_for_source(source)
metadata_image_attrs = SourceImageParser.store_source_images(metadata_directory, metadata)
if source.media_profile.download_source_images && series_directory do
@@ -94,7 +92,9 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorker do
defp determine_series_directory(source) do
output_path = DownloadOptionBuilder.build_output_path_for(source)
- {:ok, %{filepath: filepath}} = MediaCollection.get_source_details(source.original_url, output: output_path)
+ runner_opts = [output: output_path]
+ addl_opts = [use_cookies: Sources.use_cookies?(source, :metadata)]
+ {:ok, %{filepath: filepath}} = MediaCollection.get_source_details(source.original_url, runner_opts, addl_opts)
case MetadataFileHelpers.series_directory_from_media_filepath(filepath) do
{:ok, series_directory} -> series_directory
@@ -110,6 +110,21 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorker do
end
end
+ defp fetch_metadata_for_source(source) do
+ tmp_output_path = "#{tmp_directory()}/#{StringUtils.random_string(16)}/source_image.%(ext)S"
+ base_opts = [convert_thumbnails: "jpg", output: tmp_output_path]
+ should_use_cookies = Sources.use_cookies?(source, :metadata)
+
+ opts =
+ if source.collection_type == :channel do
+ base_opts ++ [:write_all_thumbnails, playlist_items: 0]
+ else
+ base_opts ++ [:write_thumbnail, playlist_items: 1]
+ end
+
+ MediaCollection.get_source_metadata(source.original_url, opts, use_cookies: should_use_cookies)
+ end
+
defp tmp_directory do
Application.get_env(:pinchflat, :tmpfile_directory)
end
diff --git a/lib/pinchflat/podcasts/opml_feed_builder.ex b/lib/pinchflat/podcasts/opml_feed_builder.ex
new file mode 100644
index 0000000..c764a66
--- /dev/null
+++ b/lib/pinchflat/podcasts/opml_feed_builder.ex
@@ -0,0 +1,40 @@
+defmodule Pinchflat.Podcasts.OpmlFeedBuilder do
+ @moduledoc """
+ Methods for building an OPML feed for a list of sources.
+ """
+
+ import Pinchflat.Utils.XmlUtils, only: [safe: 1]
+
+ alias PinchflatWeb.Router.Helpers, as: Routes
+
+ @doc """
+ Builds an OPML feed for a given list of sources.
+
+ Returns an XML document as a string.
+ """
+ def build(url_base, sources) do
+ sources_xml =
+ Enum.map(
+ sources,
+ &"""
+ <outline type="rss" text="#{safe(&1.custom_name)}" xmlUrl="#{source_route(url_base, &1)}" />
+ """
+ )
+
+ """
+ <?xml version="1.0" encoding="UTF-8"?>
+ <opml version="2.0">
+ <head>
+ <title>All Sources</title>
+ </head>
+ <body>
+ #{Enum.join(sources_xml, "\n")}
+ </body>
+ </opml>
+ """
+ end
+
+ defp source_route(url_base, source) do
+ Path.join(url_base, "#{Routes.podcast_path(PinchflatWeb.Endpoint, :rss_feed, source.uuid)}.xml")
+ end
+end
diff --git a/lib/pinchflat/podcasts/podcast_helpers.ex b/lib/pinchflat/podcasts/podcast_helpers.ex
index 041107c..30ba17a 100644
--- a/lib/pinchflat/podcasts/podcast_helpers.ex
+++ b/lib/pinchflat/podcasts/podcast_helpers.ex
@@ -5,11 +5,25 @@ defmodule Pinchflat.Podcasts.PodcastHelpers do
"""
use Pinchflat.Media.MediaQuery
+ use Pinchflat.Sources.SourcesQuery
alias Pinchflat.Repo
alias Pinchflat.Metadata.MediaMetadata
alias Pinchflat.Metadata.SourceMetadata
+ @doc """
+ Returns a list of sources that are not marked for deletion.
+
+ Returns: [%Source{}]
+ """
+ def opml_sources() do
+ SourcesQuery.new()
+ |> select([s], %{custom_name: s.custom_name, uuid: s.uuid})
+ |> where([s], is_nil(s.marked_for_deletion_at))
+ |> order_by(asc: :custom_name)
+ |> Repo.all()
+ end
+
@doc """
Returns a list of media items that have been downloaded to disk
and have been proven to still exist there.
diff --git a/lib/pinchflat/profiles/media_profile.ex b/lib/pinchflat/profiles/media_profile.ex
index 3111640..90c93d6 100644
--- a/lib/pinchflat/profiles/media_profile.ex
+++ b/lib/pinchflat/profiles/media_profile.ex
@@ -26,8 +26,11 @@ defmodule Pinchflat.Profiles.MediaProfile do
sponsorblock_categories
shorts_behaviour
livestream_behaviour
+ audio_track
preferred_resolution
+ media_container
redownload_delay_days
+ marked_for_deletion_at
)a
@required_fields ~w(name output_path_template)a
@@ -52,7 +55,7 @@ defmodule Pinchflat.Profiles.MediaProfile do
field :embed_metadata, :boolean, default: false
field :download_nfo, :boolean, default: false
- field :sponsorblock_behaviour, Ecto.Enum, values: [:disabled, :remove], default: :disabled
+ field :sponsorblock_behaviour, Ecto.Enum, values: [:disabled, :mark, :remove], default: :disabled
field :sponsorblock_categories, {:array, :string}, default: []
# NOTE: these do NOT speed up indexing - the indexer still has to go
# through the entire collection to determine if a media is a short or
@@ -63,7 +66,11 @@ defmodule Pinchflat.Profiles.MediaProfile do
# See `build_format_clauses` in the Media context for more.
field :shorts_behaviour, Ecto.Enum, values: ~w(include exclude only)a, default: :include
field :livestream_behaviour, Ecto.Enum, values: ~w(include exclude only)a, default: :include
- field :preferred_resolution, Ecto.Enum, values: ~w(4320p 2160p 1080p 720p 480p 360p audio)a, default: :"1080p"
+ field :audio_track, :string
+ field :preferred_resolution, Ecto.Enum, values: ~w(4320p 2160p 1440p 1080p 720p 480p 360p audio)a, default: :"1080p"
+ field :media_container, :string, default: nil
+
+ field :marked_for_deletion_at, :utc_datetime
has_many :sources, Source
diff --git a/lib/pinchflat/profiles/media_profile_deletion_worker.ex b/lib/pinchflat/profiles/media_profile_deletion_worker.ex
new file mode 100644
index 0000000..230a085
--- /dev/null
+++ b/lib/pinchflat/profiles/media_profile_deletion_worker.ex
@@ -0,0 +1,38 @@
+defmodule Pinchflat.Profiles.MediaProfileDeletionWorker do
+ @moduledoc false
+
+ use Oban.Worker,
+ queue: :local_data,
+ tags: ["media_profiles", "local_data"]
+
+ require Logger
+
+ alias __MODULE__
+ alias Pinchflat.Profiles
+
+ @doc """
+ Starts the profile deletion worker. Does not attach it to a task like `kickoff_with_task/2`
+ since deletion also cancels all tasks for the profile
+
+ Returns {:ok, %Oban.Job{}} | {:error, %Ecto.Changeset{}}
+ """
+ def kickoff(profile, job_args \\ %{}, job_opts \\ []) do
+ %{id: profile.id}
+ |> Map.merge(job_args)
+ |> MediaProfileDeletionWorker.new(job_opts)
+ |> Oban.insert()
+ end
+
+ @doc """
+ Deletes a profile and optionally deletes its files
+
+ Returns :ok
+ """
+ @impl Oban.Worker
+ def perform(%Oban.Job{args: %{"id" => profile_id} = args}) do
+ delete_files = Map.get(args, "delete_files", false)
+ profile = Profiles.get_media_profile!(profile_id)
+
+ Profiles.delete_media_profile(profile, delete_files: delete_files)
+ end
+end
diff --git a/lib/pinchflat/profiles/profiles_query.ex b/lib/pinchflat/profiles/profiles_query.ex
new file mode 100644
index 0000000..caa1315
--- /dev/null
+++ b/lib/pinchflat/profiles/profiles_query.ex
@@ -0,0 +1,29 @@
+defmodule Pinchflat.Profiles.ProfilesQuery do
+ @moduledoc """
+ Query helpers for the Profiles context.
+
+ These methods are made to be one-ish liners used
+ to compose queries. Each method should strive to do
+ _one_ thing. These don't need to be tested as
+ they are just building blocks for other functionality
+ which, itself, will be tested.
+ """
+ import Ecto.Query, warn: false
+
+ alias Pinchflat.Profiles.MediaProfile
+
+ # This allows the module to be aliased and query methods to be used
+ # all in one go
+ # usage: use Pinchflat.Profiles.ProfilesQuery
+ defmacro __using__(_opts) do
+ quote do
+ import Ecto.Query, warn: false
+
+ alias unquote(__MODULE__)
+ end
+ end
+
+ def new do
+ MediaProfile
+ end
+end
diff --git a/lib/pinchflat/prom_ex.ex b/lib/pinchflat/prom_ex.ex
new file mode 100644
index 0000000..a46347d
--- /dev/null
+++ b/lib/pinchflat/prom_ex.ex
@@ -0,0 +1,40 @@
+defmodule Pinchflat.PromEx do
+ @moduledoc """
+ Configuration for the PromEx library which provides Prometheus metrics
+ """
+
+ use PromEx, otp_app: :pinchflat
+
+ alias PromEx.Plugins
+
+ @impl true
+ def plugins do
+ [
+ Plugins.Application,
+ Plugins.Beam,
+ {Plugins.Phoenix, router: PinchflatWeb.Router, endpoint: PinchflatWeb.Endpoint},
+ Plugins.Ecto,
+ Plugins.Oban,
+ Plugins.PhoenixLiveView
+ ]
+ end
+
+ @impl true
+ def dashboard_assigns do
+ [
+ default_selected_interval: "30s"
+ ]
+ end
+
+ @impl true
+ def dashboards do
+ [
+ {:prom_ex, "application.json"},
+ {:prom_ex, "beam.json"},
+ {:prom_ex, "phoenix.json"},
+ {:prom_ex, "ecto.json"},
+ {:prom_ex, "oban.json"},
+ {:prom_ex, "phoenix_live_view.json"}
+ ]
+ end
+end
diff --git a/lib/pinchflat/release.ex b/lib/pinchflat/release.ex
index 4547c0a..c6060c3 100644
--- a/lib/pinchflat/release.ex
+++ b/lib/pinchflat/release.ex
@@ -29,6 +29,8 @@ defmodule Pinchflat.Release do
[
"/config",
"/downloads",
+ "/etc/yt-dlp",
+ "/etc/yt-dlp/plugins",
Application.get_env(:pinchflat, :media_directory),
Application.get_env(:pinchflat, :tmpfile_directory),
Application.get_env(:pinchflat, :extras_directory),
diff --git a/lib/pinchflat/settings/setting.ex b/lib/pinchflat/settings/setting.ex
index d449ca0..f2a6b0a 100644
--- a/lib/pinchflat/settings/setting.ex
+++ b/lib/pinchflat/settings/setting.ex
@@ -14,15 +14,19 @@ defmodule Pinchflat.Settings.Setting do
:apprise_server,
:video_codec_preference,
:audio_codec_preference,
- :youtube_api_key
+ :youtube_api_key,
+ :extractor_sleep_interval_seconds,
+ :download_throughput_limit,
+ :restrict_filenames
]
- @required_fields ~w(
- onboarding
- pro_enabled
- video_codec_preference
- audio_codec_preference
- )a
+ @required_fields [
+ :onboarding,
+ :pro_enabled,
+ :video_codec_preference,
+ :audio_codec_preference,
+ :extractor_sleep_interval_seconds
+ ]
schema "settings" do
field :onboarding, :boolean, default: true
@@ -31,6 +35,11 @@ defmodule Pinchflat.Settings.Setting do
field :apprise_version, :string
field :apprise_server, :string
field :youtube_api_key, :string
+ field :route_token, :string
+ field :extractor_sleep_interval_seconds, :integer, default: 0
+ # This is a string because it accepts values like "100K" or "4.2M"
+ field :download_throughput_limit, :string
+ field :restrict_filenames, :boolean, default: false
field :video_codec_preference, :string
field :audio_codec_preference, :string
@@ -41,5 +50,6 @@ defmodule Pinchflat.Settings.Setting do
setting
|> cast(attrs, @allowed_fields)
|> validate_required(@required_fields)
+ |> validate_number(:extractor_sleep_interval_seconds, greater_than_or_equal_to: 0)
end
end
diff --git a/lib/pinchflat/slow_indexing/file_follower_server.ex b/lib/pinchflat/slow_indexing/file_follower_server.ex
index 91c514d..655a6da 100644
--- a/lib/pinchflat/slow_indexing/file_follower_server.ex
+++ b/lib/pinchflat/slow_indexing/file_follower_server.ex
@@ -106,7 +106,7 @@ defmodule Pinchflat.SlowIndexing.FileFollowerServer do
{:noreply, %{state | last_activity: DateTime.utc_now()}}
:eof ->
- Logger.debug("EOF reached, waiting before trying to read new lines")
+ Logger.debug("Current batch of media processed. Will check again in #{@poll_interval_ms}ms")
Process.send_after(self(), :read_new_lines, @poll_interval_ms)
{:noreply, state}
diff --git a/lib/pinchflat/slow_indexing/media_collection_indexing_worker.ex b/lib/pinchflat/slow_indexing/media_collection_indexing_worker.ex
index 5dbac3e..ae555ff 100644
--- a/lib/pinchflat/slow_indexing/media_collection_indexing_worker.ex
+++ b/lib/pinchflat/slow_indexing/media_collection_indexing_worker.ex
@@ -79,21 +79,21 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorker do
case {source.index_frequency_minutes, source.last_indexed_at} do
{index_freq, _} when index_freq > 0 ->
# If the indexing is on a schedule simply run indexing and reschedule
- perform_indexing_and_notification(source)
+ perform_indexing_and_notification(source, was_forced: args["force"])
maybe_enqueue_fast_indexing_task(source)
reschedule_indexing(source)
{_, nil} ->
# If the source has never been indexed, index it once
# even if it's not meant to reschedule
- perform_indexing_and_notification(source)
+ perform_indexing_and_notification(source, was_forced: args["force"])
:ok
_ ->
# If the source HAS been indexed and is not meant to reschedule,
# perform a no-op (unless forced)
if args["force"] do
- perform_indexing_and_notification(source)
+ perform_indexing_and_notification(source, was_forced: true)
end
:ok
@@ -103,11 +103,11 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorker do
Ecto.StaleEntryError -> Logger.info("#{__MODULE__} discarded: source #{source_id} stale")
end
- defp perform_indexing_and_notification(source) do
+ defp perform_indexing_and_notification(source, indexing_opts) do
apprise_server = Settings.get!(:apprise_server)
SourceNotifications.wrap_new_media_notification(apprise_server, source, fn ->
- SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source, indexing_opts)
end)
end
diff --git a/lib/pinchflat/slow_indexing/slow_indexing_helpers.ex b/lib/pinchflat/slow_indexing/slow_indexing_helpers.ex
index 2434b92..8721b21 100644
--- a/lib/pinchflat/slow_indexing/slow_indexing_helpers.ex
+++ b/lib/pinchflat/slow_indexing/slow_indexing_helpers.ex
@@ -5,6 +5,8 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpers do
Many of these methods are made to be kickoff or be consumed by workers.
"""
+ use Pinchflat.Media.MediaQuery
+
require Logger
alias Pinchflat.Repo
@@ -14,30 +16,52 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpers do
alias Pinchflat.Sources.Source
alias Pinchflat.Media.MediaItem
alias Pinchflat.YtDlp.MediaCollection
+ alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.Downloading.DownloadingHelpers
alias Pinchflat.SlowIndexing.FileFollowerServer
+ alias Pinchflat.Downloading.DownloadOptionBuilder
alias Pinchflat.SlowIndexing.MediaCollectionIndexingWorker
alias Pinchflat.YtDlp.Media, as: YtDlpMedia
@doc """
- Starts tasks for indexing a source's media regardless of the source's indexing
- frequency. It's assumed the caller will check for indexing frequency.
+ Kills old indexing tasks and starts a new task to index the media collection.
- Returns {:ok, %Task{}}.
+ The job is delayed based on the source's `index_frequency_minutes` setting unless
+ one of the following is true:
+ - The `force` option is set to true
+ - The source has never been indexed before
+ - The source has been indexed before, but the last indexing job was more than
+ `index_frequency_minutes` ago
+
+ Returns {:ok, %Task{}}
"""
def kickoff_indexing_task(%Source{} = source, job_args \\ %{}, job_opts \\ []) do
- Tasks.delete_pending_tasks_for(source, "FastIndexingWorker")
- Tasks.delete_pending_tasks_for(source, "MediaCollectionIndexingWorker")
+ job_offset_seconds = if job_args[:force], do: 0, else: calculate_job_offset_seconds(source)
- MediaCollectionIndexingWorker.kickoff_with_task(source, job_args, job_opts)
+ Tasks.delete_pending_tasks_for(source, "MediaCollectionIndexingWorker", include_executing: true)
+
+ MediaCollectionIndexingWorker.kickoff_with_task(source, job_args, job_opts ++ [schedule_in: job_offset_seconds])
+ end
+
+ @doc """
+ A helper method to delete all indexing-related tasks for a source.
+ Optionally, you can include executing tasks in the deletion process.
+
+ Returns :ok
+ """
+ def delete_indexing_tasks(%Source{} = source, opts \\ []) do
+ include_executing = Keyword.get(opts, :include_executing, false)
+
+ Tasks.delete_pending_tasks_for(source, "FastIndexingWorker", include_executing: include_executing)
+ Tasks.delete_pending_tasks_for(source, "MediaCollectionIndexingWorker", include_executing: include_executing)
end
@doc """
Given a media source, creates (indexes) the media by creating media_items for each
media ID in the source. Afterward, kicks off a download task for each pending media
- item belonging to the source. You can't tell me the method name isn't descriptive!
- Returns a list of media items or changesets (if the media item couldn't be created).
+ item belonging to the source. Returns a list of media items or changesets
+ (if the media item couldn't be created).
Indexing is slow and usually returns a list of all media data at once for record creation.
To help with this, we use a file follower to watch the file that yt-dlp writes to
@@ -45,20 +69,33 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpers do
clarity to the user experience. This has a few things to be aware of which are documented
below in the file watcher setup method.
+ Additionally, in the case of a repeat index we create a download archive file that
+ contains some media IDs that we've indexed in the past. Note that this archive doesn't
+ contain the most recent IDs but rather a subset of IDs that are offset by some amount.
+ Practically, this means that we'll re-index a small handful of media that we've recently
+ indexed, but this is a good thing since it'll let us pick up on any recent changes to the
+ most recent media items.
+
+ We don't create a download archive for playlists (only channels), nor do we create one if
+ the indexing was forced by the user.
+
NOTE: downloads are only enqueued if the source is set to download media. Downloads are
also enqueued for ALL pending media items, not just the ones that were indexed in this
job run. This should ensure that any stragglers are caught if, for some reason, they
weren't enqueued or somehow got de-queued.
- Since indexing returns all media data EVERY TIME, we that that opportunity to update
- indexing metadata for media items that have already been created.
+ Available options:
+ - `was_forced`: Whether the indexing was forced by the user
Returns [%MediaItem{} | %Ecto.Changeset{}]
"""
- def index_and_enqueue_download_for_media_items(%Source{} = source) do
+ def index_and_enqueue_download_for_media_items(%Source{} = source, opts \\ []) do
+ # The media_profile is needed to determine the quality options to _then_ determine a more
+ # accurate predicted filepath
+ source = Repo.preload(source, [:media_profile])
# See the method definition below for more info on how file watchers work
# (important reading if you're not familiar with it)
- {:ok, media_attributes} = setup_file_watcher_and_kickoff_indexing(source)
+ {:ok, media_attributes} = setup_file_watcher_and_kickoff_indexing(source, opts)
# Reload because the source may have been updated during the (long-running) indexing process
# and important settings like `download_media` may have changed.
source = Repo.reload!(source)
@@ -90,11 +127,20 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpers do
# It attempts a graceful shutdown of the file follower after the indexing is done,
# but the FileFollowerServer will also stop itself if it doesn't see any activity
# for a sufficiently long time.
- defp setup_file_watcher_and_kickoff_indexing(source) do
+ defp setup_file_watcher_and_kickoff_indexing(source, opts) do
+ was_forced = Keyword.get(opts, :was_forced, false)
{:ok, pid} = FileFollowerServer.start_link()
handler = fn filepath -> setup_file_follower_watcher(pid, filepath, source) end
- result = MediaCollection.get_media_attributes_for_collection(source.original_url, file_listener_handler: handler)
+ should_use_cookies = Sources.use_cookies?(source, :indexing)
+
+ command_opts =
+ [output: DownloadOptionBuilder.build_output_path_for(source)] ++
+ DownloadOptionBuilder.build_quality_options_for(source) ++
+ build_download_archive_options(source, was_forced)
+
+ runner_opts = [file_listener_handler: handler, use_cookies: should_use_cookies]
+ result = MediaCollection.get_media_attributes_for_collection(source.original_url, command_opts, runner_opts)
FileFollowerServer.stop(pid)
@@ -131,4 +177,68 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpers do
changeset
end
end
+
+ # Find the difference between the current time and the last time the source was indexed
+ defp calculate_job_offset_seconds(%Source{last_indexed_at: nil}), do: 0
+
+ defp calculate_job_offset_seconds(source) do
+ offset_seconds = DateTime.diff(DateTime.utc_now(), source.last_indexed_at, :second)
+ index_frequency_seconds = source.index_frequency_minutes * 60
+
+ max(0, index_frequency_seconds - offset_seconds)
+ end
+
+ # The download archive file works in tandem with --break-on-existing to stop
+ # yt-dlp once we've hit media items we've already indexed. But we generate
+ # this list with a bit of an offset so we do intentionally re-scan some media
+ # items to pick up any recent changes (see `get_media_items_for_download_archive`).
+ #
+ # From there, we format the media IDs in the way that yt-dlp expects (ie: "<extractor> <media_id>")
+ # and return the filepath to the caller.
+ defp create_download_archive_file(source) do
+ tmpfile = FilesystemUtils.generate_metadata_tmpfile(:txt)
+
+ archive_contents =
+ source
+ |> get_media_items_for_download_archive()
+ |> Enum.map_join("\n", fn media_item -> "youtube #{media_item.media_id}" end)
+
+ case File.write(tmpfile, archive_contents) do
+ :ok -> tmpfile
+ err -> err
+ end
+ end
+
+ # Sorting by `uploaded_at` is important because we want to re-index the most recent
+ # media items first but there is no guarantee of any correlation between ID and uploaded_at.
+ #
+ # The offset is important because we want to re-index some media items that we've
+ # recently indexed to pick up on any changes. The limit is because we want this mechanism
+ # to work even if, for example, the video we were using as a stopping point was deleted.
+ # It's not a perfect system, but it should do well enough.
+ #
+ # The chosen limit and offset are arbitrary, independent, and vibes-based. Feel free to
+ # tweak as-needed
+ defp get_media_items_for_download_archive(source) do
+ MediaQuery.new()
+ |> where(^MediaQuery.for_source(source))
+ |> order_by(desc: :uploaded_at)
+ |> limit(50)
+ |> offset(20)
+ |> Repo.all()
+ end
+
+ # The download archive isn't useful for playlists (since those are ordered arbitrarily)
+ # and we don't want to use it if the indexing was forced by the user. In other words,
+ # only create an archive for channels that are being indexed as part of their regular
+ # indexing schedule. The first indexing pass should also not create an archive.
+ defp build_download_archive_options(%Source{collection_type: :playlist}, _was_forced), do: []
+ defp build_download_archive_options(%Source{last_indexed_at: nil}, _was_forced), do: []
+ defp build_download_archive_options(_source, true), do: []
+
+ defp build_download_archive_options(source, _was_forced) do
+ archive_file = create_download_archive_file(source)
+
+ [:break_on_existing, download_archive: archive_file]
+ end
end
diff --git a/lib/pinchflat/sources/source.ex b/lib/pinchflat/sources/source.ex
index 418c828..00b4776 100644
--- a/lib/pinchflat/sources/source.ex
+++ b/lib/pinchflat/sources/source.ex
@@ -15,6 +15,7 @@ defmodule Pinchflat.Sources.Source do
alias Pinchflat.Metadata.SourceMetadata
@allowed_fields ~w(
+ enabled
collection_name
collection_id
collection_type
@@ -27,6 +28,7 @@ defmodule Pinchflat.Sources.Source do
series_directory
index_frequency_minutes
fast_index
+ cookie_behaviour
download_media
last_indexed_at
original_url
@@ -35,6 +37,9 @@ defmodule Pinchflat.Sources.Source do
title_filter_regex
media_profile_id
output_path_template_override
+ marked_for_deletion_at
+ min_duration_seconds
+ max_duration_seconds
)a
# Expensive API calls are made when a source is inserted/updated so
@@ -60,6 +65,7 @@ defmodule Pinchflat.Sources.Source do
)a
schema "sources" do
+ field :enabled, :boolean, default: true
# This is _not_ used as the primary key or internally in the database
# relations. This is only used to prevent an enumeration attack on the streaming
# and RSS feed endpoints since those _must_ be public (ie: no basic auth)
@@ -72,6 +78,7 @@ defmodule Pinchflat.Sources.Source do
field :collection_type, Ecto.Enum, values: [:channel, :playlist]
field :index_frequency_minutes, :integer, default: 60 * 24
field :fast_index, :boolean, default: false
+ field :cookie_behaviour, Ecto.Enum, values: [:disabled, :when_needed, :all_operations], default: :disabled
field :download_media, :boolean, default: true
field :last_indexed_at, :utc_datetime
# Only download media items that were published after this date
@@ -81,12 +88,17 @@ defmodule Pinchflat.Sources.Source do
field :title_filter_regex, :string
field :output_path_template_override, :string
+ field :min_duration_seconds, :integer
+ field :max_duration_seconds, :integer
+
field :series_directory, :string
field :nfo_filepath, :string
field :poster_filepath, :string
field :fanart_filepath, :string
field :banner_filepath, :string
+ field :marked_for_deletion_at, :utc_datetime
+
belongs_to :media_profile, MediaProfile
has_one :metadata, SourceMetadata, on_replace: :update
@@ -113,6 +125,7 @@ defmodule Pinchflat.Sources.Source do
|> dynamic_default(:uuid, fn _ -> Ecto.UUID.generate() end)
|> validate_required(required_fields)
|> validate_title_regex()
+ |> validate_min_and_max_durations()
|> validate_number(:retention_period_days, greater_than_or_equal_to: 0)
# Ensures it ends with `.{{ ext }}` or `.%(ext)s` or similar (with a little wiggle room)
|> validate_format(:output_path_template_override, MediaProfile.ext_regex(), message: "must end with .{{ ext }}")
@@ -159,6 +172,17 @@ defmodule Pinchflat.Sources.Source do
defp validate_title_regex(changeset), do: changeset
+ defp validate_min_and_max_durations(changeset) do
+ min_duration = get_change(changeset, :min_duration_seconds)
+ max_duration = get_change(changeset, :max_duration_seconds)
+
+ case {min_duration, max_duration} do
+ {min, max} when is_nil(min) or is_nil(max) -> changeset
+ {min, max} when min >= max -> add_error(changeset, :max_duration_seconds, "must be greater than minimum duration")
+ _ -> changeset
+ end
+ end
+
defimpl Jason.Encoder, for: Source do
def encode(value, opts) do
value
diff --git a/lib/pinchflat/sources/source_deletion_worker.ex b/lib/pinchflat/sources/source_deletion_worker.ex
new file mode 100644
index 0000000..9c36837
--- /dev/null
+++ b/lib/pinchflat/sources/source_deletion_worker.ex
@@ -0,0 +1,38 @@
+defmodule Pinchflat.Sources.SourceDeletionWorker do
+ @moduledoc false
+
+ use Oban.Worker,
+ queue: :local_data,
+ tags: ["sources", "local_data"]
+
+ require Logger
+
+ alias __MODULE__
+ alias Pinchflat.Sources
+
+ @doc """
+ Starts the source deletion worker. Does not attach it to a task like `kickoff_with_task/2`
+ since deletion also cancels all tasks for the source
+
+ Returns {:ok, %Task{}} | {:error, %Ecto.Changeset{}}
+ """
+ def kickoff(source, job_args \\ %{}, job_opts \\ []) do
+ %{id: source.id}
+ |> Map.merge(job_args)
+ |> SourceDeletionWorker.new(job_opts)
+ |> Oban.insert()
+ end
+
+ @doc """
+ Deletes a source and optionally deletes its files
+
+ Returns :ok
+ """
+ @impl Oban.Worker
+ def perform(%Oban.Job{args: %{"id" => source_id} = args}) do
+ delete_files = Map.get(args, "delete_files", false)
+ source = Sources.get_source!(source_id)
+
+ Sources.delete_source(source, delete_files: delete_files)
+ end
+end
diff --git a/lib/pinchflat/sources/sources.ex b/lib/pinchflat/sources/sources.ex
index 6cd45c1..edd37f1 100644
--- a/lib/pinchflat/sources/sources.ex
+++ b/lib/pinchflat/sources/sources.ex
@@ -15,8 +15,8 @@ defmodule Pinchflat.Sources do
alias Pinchflat.Metadata.SourceMetadata
alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.Downloading.DownloadingHelpers
- alias Pinchflat.FastIndexing.FastIndexingWorker
alias Pinchflat.SlowIndexing.SlowIndexingHelpers
+ alias Pinchflat.FastIndexing.FastIndexingHelpers
alias Pinchflat.Metadata.SourceMetadataStorageWorker
@doc """
@@ -32,6 +32,19 @@ defmodule Pinchflat.Sources do
source.output_path_template_override || media_profile.output_path_template
end
+ @doc """
+ Returns a boolean indicating whether or not cookies should be used for a given operation.
+
+ Returns boolean()
+ """
+ def use_cookies?(source, operation) when operation in [:indexing, :downloading, :metadata, :error_recovery] do
+ case source.cookie_behaviour do
+ :disabled -> false
+ :all_operations -> true
+ :when_needed -> operation in [:indexing, :error_recovery]
+ end
+ end
+
@doc """
Returns the list of sources. Returns [%Source{}, ...]
"""
@@ -180,11 +193,22 @@ defmodule Pinchflat.Sources do
end
defp add_source_details_to_changeset(source, changeset) do
- case MediaCollection.get_source_details(changeset.changes.original_url) do
+ original_url = changeset.changes.original_url
+ should_use_cookies = Ecto.Changeset.get_field(changeset, :cookie_behaviour) == :all_operations
+ # Skipping sleep interval since this is UI blocking and we want to keep this as fast as possible
+ addl_opts = [use_cookies: should_use_cookies, skip_sleep_interval: true]
+
+ case MediaCollection.get_source_details(original_url, [], addl_opts) do
{:ok, source_details} ->
add_source_details_by_collection_type(source, changeset, source_details)
- {:error, runner_error, _status_code} ->
+ err ->
+ runner_error =
+ case err do
+ {:error, error_msg, _status_code} -> error_msg
+ {:error, error_msg} -> error_msg
+ end
+
Ecto.Changeset.add_error(
changeset,
:original_url,
@@ -247,19 +271,40 @@ defmodule Pinchflat.Sources do
end
end
- # If the source is NOT new (ie: updated) and the download_media flag has changed,
+ # If the source is new (ie: not persisted), do nothing
+ defp maybe_handle_media_tasks(%{data: %{__meta__: %{state: state}}}, _source) when state != :loaded do
+ :ok
+ end
+
+ # If the source is NOT new (ie: updated),
# enqueue or dequeue media download tasks as necessary.
defp maybe_handle_media_tasks(changeset, source) do
- case {changeset.data, changeset.changes} do
- {%{__meta__: %{state: :loaded}}, %{download_media: true}} ->
+ current_changes = changeset.changes
+ applied_changes = Ecto.Changeset.apply_changes(changeset)
+
+ # We need both current_changes and applied_changes to determine
+ # the course of action to take. For example, we only care if a source is supposed
+ # to be `enabled` or not - we don't care if that information comes from the
+ # current changes or if that's how it already was in the database.
+ # Rephrased, we're essentially using it in place of `get_field/2`
+ case {current_changes, applied_changes} do
+ {%{download_media: true}, %{enabled: true}} ->
DownloadingHelpers.enqueue_pending_download_tasks(source)
- {%{__meta__: %{state: :loaded}}, %{download_media: false}} ->
+ {%{enabled: true}, %{download_media: true}} ->
+ DownloadingHelpers.enqueue_pending_download_tasks(source)
+
+ {%{download_media: false}, _} ->
+ DownloadingHelpers.dequeue_pending_download_tasks(source)
+
+ {%{enabled: false}, _} ->
DownloadingHelpers.dequeue_pending_download_tasks(source)
_ ->
- :ok
+ nil
end
+
+ :ok
end
defp maybe_run_indexing_task(changeset, source) do
@@ -268,6 +313,10 @@ defmodule Pinchflat.Sources do
%{__meta__: %{state: :built}} ->
SlowIndexingHelpers.kickoff_indexing_task(source)
+ if Ecto.Changeset.get_field(changeset, :fast_index) do
+ FastIndexingHelpers.kickoff_indexing_task(source)
+ end
+
# If the record has been persisted, only run indexing if the
# indexing frequency has been changed and is now greater than 0
%{__meta__: %{state: :loaded}} ->
@@ -293,13 +342,22 @@ defmodule Pinchflat.Sources do
end
defp maybe_update_slow_indexing_task(changeset, source) do
- case changeset.changes do
- %{index_frequency_minutes: mins} when mins > 0 ->
+ # See comment in `maybe_handle_media_tasks` as to why we need these
+ current_changes = changeset.changes
+ applied_changes = Ecto.Changeset.apply_changes(changeset)
+
+ case {current_changes, applied_changes} do
+ {%{index_frequency_minutes: mins}, %{enabled: true}} when mins > 0 ->
SlowIndexingHelpers.kickoff_indexing_task(source)
- %{index_frequency_minutes: _} ->
- Tasks.delete_pending_tasks_for(source, "FastIndexingWorker")
- Tasks.delete_pending_tasks_for(source, "MediaCollectionIndexingWorker")
+ {%{enabled: true}, %{index_frequency_minutes: mins}} when mins > 0 ->
+ SlowIndexingHelpers.kickoff_indexing_task(source)
+
+ {%{index_frequency_minutes: _}, _} ->
+ SlowIndexingHelpers.delete_indexing_tasks(source, include_executing: true)
+
+ {%{enabled: false}, _} ->
+ SlowIndexingHelpers.delete_indexing_tasks(source, include_executing: true)
_ ->
:ok
@@ -307,13 +365,25 @@ defmodule Pinchflat.Sources do
end
defp maybe_update_fast_indexing_task(changeset, source) do
- case changeset.changes do
- %{fast_index: true} ->
- Tasks.delete_pending_tasks_for(source, "FastIndexingWorker")
- FastIndexingWorker.kickoff_with_task(source)
+ # See comment in `maybe_handle_media_tasks` as to why we need these
+ current_changes = changeset.changes
+ applied_changes = Ecto.Changeset.apply_changes(changeset)
- %{fast_index: false} ->
- Tasks.delete_pending_tasks_for(source, "FastIndexingWorker")
+ # This technically could be simplified since `maybe_update_slow_indexing_task`
+ # has some overlap re: deleting pending tasks, but I'm keeping it separate
+ # for clarity and explicitness.
+ case {current_changes, applied_changes} do
+ {%{fast_index: true}, %{enabled: true}} ->
+ FastIndexingHelpers.kickoff_indexing_task(source)
+
+ {%{enabled: true}, %{fast_index: true}} ->
+ FastIndexingHelpers.kickoff_indexing_task(source)
+
+ {%{fast_index: false}, _} ->
+ Tasks.delete_pending_tasks_for(source, "FastIndexingWorker", include_executing: true)
+
+ {%{enabled: false}, _} ->
+ Tasks.delete_pending_tasks_for(source, "FastIndexingWorker", include_executing: true)
_ ->
:ok
diff --git a/lib/pinchflat/tasks/tasks.ex b/lib/pinchflat/tasks/tasks.ex
index 7b94e3c..2dfef0a 100644
--- a/lib/pinchflat/tasks/tasks.ex
+++ b/lib/pinchflat/tasks/tasks.ex
@@ -53,20 +53,6 @@ defmodule Pinchflat.Tasks do
)
end
- @doc """
- Returns the list of pending tasks for a given record type and ID. Optionally allows you to specify
- which worker to include.
-
- Returns [%Task{}, ...]
- """
- def list_pending_tasks_for(record, worker_name \\ nil) do
- list_tasks_for(
- record,
- worker_name,
- [:available, :scheduled, :retryable]
- )
- end
-
@doc """
Gets a single task.
@@ -127,13 +113,13 @@ defmodule Pinchflat.Tasks do
@doc """
Deletes all tasks attached to a given record, cancelling any attached jobs.
- Optionally allows you to specify which worker to include.
+ Optionally allows you to specify which worker and job states to include.
Returns :ok
"""
- def delete_tasks_for(record, worker_name \\ nil) do
+ def delete_tasks_for(record, worker_name \\ nil, job_states \\ Oban.Job.states()) do
record
- |> list_tasks_for(worker_name)
+ |> list_tasks_for(worker_name, job_states)
|> Enum.each(&delete_task/1)
end
@@ -143,10 +129,12 @@ defmodule Pinchflat.Tasks do
Returns :ok
"""
- def delete_pending_tasks_for(record, worker_name \\ nil) do
- record
- |> list_pending_tasks_for(worker_name)
- |> Enum.each(&delete_task/1)
+ def delete_pending_tasks_for(record, worker_name \\ nil, opts \\ []) do
+ include_executing = Keyword.get(opts, :include_executing, false)
+ base_job_states = [:available, :scheduled, :retryable]
+ job_states = if include_executing, do: base_job_states ++ [:executing], else: base_job_states
+
+ delete_tasks_for(record, worker_name, job_states)
end
@doc """
diff --git a/lib/pinchflat/utils/filesystem_utils.ex b/lib/pinchflat/utils/filesystem_utils.ex
index 8652192..e7acb86 100644
--- a/lib/pinchflat/utils/filesystem_utils.ex
+++ b/lib/pinchflat/utils/filesystem_utils.ex
@@ -20,6 +20,24 @@ defmodule Pinchflat.Utils.FilesystemUtils do
end
end
+ @doc """
+ Checks if two filepaths reference the same file.
+
+ Useful if you have a relative and absolute filepath and want to be sure they're the same file.
+ Also works with symlinks.
+
+ Returns boolean()
+ """
+ def filepaths_reference_same_file?(filepath_1, filepath_2) do
+ {:ok, stat_1} = File.stat(filepath_1)
+ {:ok, stat_2} = File.stat(filepath_2)
+
+ identifier_1 = "#{stat_1.major_device}:#{stat_1.minor_device}:#{stat_1.inode}"
+ identifier_2 = "#{stat_2.major_device}:#{stat_2.minor_device}:#{stat_2.inode}"
+
+ identifier_1 == identifier_2
+ end
+
@doc """
Generates a temporary file and returns its path. The file is empty and has the given type.
Generates all the directories in the path if they don't exist.
@@ -27,8 +45,20 @@ defmodule Pinchflat.Utils.FilesystemUtils do
Returns binary()
"""
def generate_metadata_tmpfile(type) do
+ filename = StringUtils.random_string(64)
+ # This "namespacing" is more to help with development since things get
+ # weird in my editor when there are thousands of files in a single directory
+ first_two = String.slice(filename, 0..1)
+ second_two = String.slice(filename, 2..3)
tmpfile_directory = Application.get_env(:pinchflat, :tmpfile_directory)
- filepath = Path.join([tmpfile_directory, "#{StringUtils.random_string(64)}.#{type}"])
+
+ filepath =
+ Path.join([
+ tmpfile_directory,
+ first_two,
+ second_two,
+ "#{filename}.#{type}"
+ ])
:ok = write_p!(filepath, "")
diff --git a/lib/pinchflat/utils/map_utils.ex b/lib/pinchflat/utils/map_utils.ex
new file mode 100644
index 0000000..41f03a1
--- /dev/null
+++ b/lib/pinchflat/utils/map_utils.ex
@@ -0,0 +1,17 @@
+defmodule Pinchflat.Utils.MapUtils do
+ @moduledoc """
+ Utility methods for working with maps
+ """
+
+ @doc """
+ Converts a nested list of 2-element tuples or lists into a map.
+
+ Returns map()
+ """
+ def from_nested_list(list) do
+ Enum.reduce(list, %{}, fn
+ [key, value], acc -> Map.put(acc, key, value)
+ {key, value}, acc -> Map.put(acc, key, value)
+ end)
+ end
+end
diff --git a/lib/pinchflat/utils/number_utils.ex b/lib/pinchflat/utils/number_utils.ex
index b7128f8..d86002b 100644
--- a/lib/pinchflat/utils/number_utils.ex
+++ b/lib/pinchflat/utils/number_utils.ex
@@ -36,4 +36,18 @@ defmodule Pinchflat.Utils.NumberUtils do
end
end)
end
+
+ @doc """
+ Adds jitter to a number based on a percentage. Returns 0 if the number is less than or equal to 0.
+
+ Returns integer()
+ """
+ def add_jitter(num, jitter_percentage \\ 0.5)
+ def add_jitter(num, _jitter_percentage) when num <= 0, do: 0
+
+ def add_jitter(num, jitter_percentage) do
+ jitter = :rand.uniform(round(num * jitter_percentage))
+
+ round(num + jitter)
+ end
end
diff --git a/lib/pinchflat/utils/string_utils.ex b/lib/pinchflat/utils/string_utils.ex
index d96d6c1..66efc5e 100644
--- a/lib/pinchflat/utils/string_utils.ex
+++ b/lib/pinchflat/utils/string_utils.ex
@@ -26,19 +26,22 @@ defmodule Pinchflat.Utils.StringUtils do
end
@doc """
- Truncates a string to the given length and adds `...` if the string is longer than the given length.
- Will break on a word boundary. Nothing happens if the string is shorter than the given length.
+ Wraps a string in double braces. Useful as a UI helper now that
+ LiveView 1.0.0 allows `{}` for interpolation so now we can't use braces
+ directly in the view.
Returns binary()
"""
- def truncate(string, length) do
- if String.length(string) > length do
- string
- |> String.slice(0..(length - 1))
- |> String.replace(~r/\s+\S*$/, "")
- |> Kernel.<>("...")
- else
- string
- end
+ def double_brace(string) do
+ "{{ #{string} }}"
end
+
+ @doc """
+ Wraps a string in quotes if it's not already a string. Useful for working with
+ error messages whose types can vary.
+
+ Returns binary()
+ """
+ def wrap_string(message) when is_binary(message), do: message
+ def wrap_string(message), do: "#{inspect(message)}"
end
diff --git a/lib/pinchflat/yt_dlp/command_runner.ex b/lib/pinchflat/yt_dlp/command_runner.ex
index 30b2404..f574d30 100644
--- a/lib/pinchflat/yt_dlp/command_runner.ex
+++ b/lib/pinchflat/yt_dlp/command_runner.ex
@@ -3,7 +3,11 @@ defmodule Pinchflat.YtDlp.CommandRunner do
Runs yt-dlp commands using the `System.cmd/3` function
"""
+ require Logger
+
+ alias Pinchflat.Settings
alias Pinchflat.Utils.CliUtils
+ alias Pinchflat.Utils.NumberUtils
alias Pinchflat.YtDlp.YtDlpCommandRunner
alias Pinchflat.Utils.FilesystemUtils, as: FSUtils
@@ -18,23 +22,32 @@ defmodule Pinchflat.YtDlp.CommandRunner do
- :output_filepath - the path to save the output to. If not provided, a temporary
file will be created and used. Useful for if you need a reference to the file
for a file watcher.
+ - :use_cookies - if true, will add a cookie file to the command options. Will not
+ attach a cookie file if the user hasn't set one up.
+ - :skip_sleep_interval - if true, will not add the sleep interval options to the command.
+ Usually only used for commands that would be UI-blocking
Returns {:ok, binary()} | {:error, output, status}.
"""
@impl YtDlpCommandRunner
- def run(url, command_opts, output_template, addl_opts \\ []) do
- # This approach lets us mock the command for testing
- command = backend_executable()
+ def run(url, action_name, command_opts, output_template, addl_opts \\ []) do
+ Logger.debug("Running yt-dlp command for action: #{action_name}")
output_filepath = generate_output_filepath(addl_opts)
print_to_file_opts = [{:print_to_file, output_template}, output_filepath]
- user_configured_opts = cookie_file_options()
+ user_configured_opts = cookie_file_options(addl_opts) ++ rate_limit_options(addl_opts) ++ misc_options()
# These must stay in exactly this order, hence why I'm giving it its own variable.
all_opts = command_opts ++ print_to_file_opts ++ user_configured_opts ++ global_options()
formatted_command_opts = [url] ++ CliUtils.parse_options(all_opts)
- case CliUtils.wrap_cmd(command, formatted_command_opts, stderr_to_stdout: true) do
- {_, 0} ->
+ case CliUtils.wrap_cmd(backend_executable(), formatted_command_opts, stderr_to_stdout: true) do
+ # yt-dlp exit codes:
+ # 0 = Everything is successful
+ # 100 = yt-dlp must restart for update to complete
+ # 101 = Download cancelled by --max-downloads etc
+ # 2 = Error in user-provided options
+ # 1 = Any other error
+ {_, status} when status in [0, 101] ->
# IDEA: consider deleting the file after reading it. It's in the tmp dir, so it's not
# a huge deal, but it's still a good idea to clean up after ourselves.
# (even on error? especially on error?)
@@ -63,6 +76,24 @@ defmodule Pinchflat.YtDlp.CommandRunner do
end
end
+ @doc """
+ Updates yt-dlp to the latest version
+
+ Returns {:ok, binary()} | {:error, binary()}
+ """
+ @impl YtDlpCommandRunner
+ def update do
+ command = backend_executable()
+
+ case CliUtils.wrap_cmd(command, ["--update"]) do
+ {output, 0} ->
+ {:ok, String.trim(output)}
+
+ {output, _} ->
+ {:error, output}
+ end
+ end
+
defp generate_output_filepath(addl_opts) do
case Keyword.get(addl_opts, :output_filepath) do
nil -> FSUtils.generate_metadata_tmpfile(:json)
@@ -78,7 +109,14 @@ defmodule Pinchflat.YtDlp.CommandRunner do
]
end
- defp cookie_file_options do
+ defp cookie_file_options(addl_opts) do
+ case Keyword.get(addl_opts, :use_cookies) do
+ true -> add_cookie_file()
+ _ -> []
+ end
+ end
+
+ defp add_cookie_file do
base_dir = Application.get_env(:pinchflat, :extras_directory)
filename_options_map = %{cookies: "cookies.txt"}
@@ -93,6 +131,32 @@ defmodule Pinchflat.YtDlp.CommandRunner do
end)
end
+ defp rate_limit_options(addl_opts) do
+ throughput_limit = Settings.get!(:download_throughput_limit)
+ sleep_interval_opts = sleep_interval_opts(addl_opts)
+ throughput_option = if throughput_limit, do: [limit_rate: throughput_limit], else: []
+
+ throughput_option ++ sleep_interval_opts
+ end
+
+ defp sleep_interval_opts(addl_opts) do
+ sleep_interval = Settings.get!(:extractor_sleep_interval_seconds)
+
+ if sleep_interval <= 0 || Keyword.get(addl_opts, :skip_sleep_interval) do
+ []
+ else
+ [
+ sleep_requests: NumberUtils.add_jitter(sleep_interval),
+ sleep_interval: NumberUtils.add_jitter(sleep_interval),
+ sleep_subtitles: NumberUtils.add_jitter(sleep_interval)
+ ]
+ end
+ end
+
+ defp misc_options do
+ if Settings.get!(:restrict_filenames), do: [:restrict_filenames], else: []
+ end
+
defp backend_executable do
Application.get_env(:pinchflat, :yt_dlp_executable)
end
diff --git a/lib/pinchflat/yt_dlp/media.ex b/lib/pinchflat/yt_dlp/media.ex
index d6879e3..9abf8e5 100644
--- a/lib/pinchflat/yt_dlp/media.ex
+++ b/lib/pinchflat/yt_dlp/media.ex
@@ -11,7 +11,8 @@ defmodule Pinchflat.YtDlp.Media do
:livestream,
:short_form_content,
:uploaded_at,
- :duration_seconds
+ :duration_seconds,
+ :predicted_media_filepath
]
defstruct [
@@ -22,7 +23,9 @@ defmodule Pinchflat.YtDlp.Media do
:livestream,
:short_form_content,
:uploaded_at,
- :duration_seconds
+ :duration_seconds,
+ :playlist_index,
+ :predicted_media_filepath
]
alias __MODULE__
@@ -36,9 +39,9 @@ defmodule Pinchflat.YtDlp.Media do
Returns {:ok, map()} | {:error, any, ...}.
"""
def download(url, command_opts \\ [], addl_opts \\ []) do
- opts = [:no_simulate] ++ command_opts
+ all_command_opts = [:no_simulate] ++ command_opts
- with {:ok, output} <- backend_runner().run(url, opts, "after_move:%()j", addl_opts),
+ with {:ok, output} <- backend_runner().run(url, :download, all_command_opts, "after_move:%()j", addl_opts),
{:ok, parsed_json} <- Phoenix.json_library().decode(output) do
{:ok, parsed_json}
else
@@ -46,31 +49,53 @@ defmodule Pinchflat.YtDlp.Media do
end
end
+ @doc """
+ Determines if the media at the given URL is ready to be downloaded.
+ Common examples of non-downloadable media are upcoming or in-progress live streams.
+
+ Returns {:ok, :downloadable | :ignorable} | {:error, any}
+ """
+ def get_downloadable_status(url, addl_opts \\ []) do
+ action = :get_downloadable_status
+ command_opts = [:simulate, :skip_download]
+
+ case backend_runner().run(url, action, command_opts, "%(.{live_status})j", addl_opts) do
+ {:ok, output} ->
+ output
+ |> Phoenix.json_library().decode!()
+ |> parse_downloadable_status()
+
+ err ->
+ err
+ end
+ end
+
@doc """
Downloads a thumbnail for a single piece of media. Usually used for
downloading thumbnails for internal use
Returns {:ok, ""} | {:error, any, ...}.
"""
- def download_thumbnail(url, command_opts \\ []) do
- opts = [:no_simulate, :skip_download, :write_thumbnail, convert_thumbnail: "jpg"] ++ command_opts
+ def download_thumbnail(url, command_opts \\ [], addl_opts \\ []) do
+ all_command_opts = [:no_simulate, :skip_download, :write_thumbnail, convert_thumbnail: "jpg"] ++ command_opts
# NOTE: it doesn't seem like this command actually returns anything in `after_move` since
# we aren't downloading the main media file
- backend_runner().run(url, opts, "after_move:%()j")
+ backend_runner().run(url, :download_thumbnail, all_command_opts, "after_move:%()j", addl_opts)
end
@doc """
Returns a map representing the media at the given URL.
+ Optionally takes a list of additional command options to pass to yt-dlp
+ or configuration-related options to pass to the runner.
- Returns {:ok, [map()]} | {:error, any, ...}.
+ Returns {:ok, %Media{}} | {:error, any, ...}.
"""
- def get_media_attributes(url) do
- runner = Application.get_env(:pinchflat, :yt_dlp_runner)
- command_opts = [:simulate, :skip_download]
+ def get_media_attributes(url, command_opts \\ [], addl_opts \\ []) do
+ all_command_opts = [:simulate, :skip_download] ++ command_opts
output_template = indexing_output_template()
- case runner.run(url, command_opts, output_template) do
+ case backend_runner().run(url, :get_media_attributes, all_command_opts, output_template, addl_opts) do
{:ok, output} ->
output
|> Phoenix.json_library().decode!()
@@ -84,9 +109,13 @@ defmodule Pinchflat.YtDlp.Media do
@doc """
Returns the output template for yt-dlp's indexing command.
+
+ NOTE: playlist_index is really only useful for playlists that will never change their order.
+ NOTE: I've switched back to `original_url` (from `webpage_url`) since it's started indicating
+ if something is a short via the URL again
"""
def indexing_output_template do
- "%(.{id,title,was_live,webpage_url,description,aspect_ratio,duration,upload_date,timestamp})j"
+ "%(.{id,title,live_status,original_url,description,aspect_ratio,duration,upload_date,timestamp,playlist_index,filename})j"
end
@doc """
@@ -100,16 +129,18 @@ defmodule Pinchflat.YtDlp.Media do
media_id: response["id"],
title: response["title"],
description: response["description"],
- original_url: response["webpage_url"],
- livestream: !!response["was_live"],
+ original_url: response["original_url"],
+ livestream: !!response["live_status"] && response["live_status"] != "not_live",
duration_seconds: response["duration"] && round(response["duration"]),
- short_form_content: response["webpage_url"] && short_form_content?(response),
- uploaded_at: response["upload_date"] && parse_uploaded_at(response)
+ short_form_content: response["original_url"] && short_form_content?(response),
+ uploaded_at: response["upload_date"] && parse_uploaded_at(response),
+ playlist_index: response["playlist_index"] || 0,
+ predicted_media_filepath: response["filename"]
}
end
defp short_form_content?(response) do
- if String.contains?(response["webpage_url"], "/shorts/") do
+ if String.contains?(response["original_url"], "/shorts/") do
true
else
# Sometimes shorts are returned without /shorts/ in the URL,
@@ -120,7 +151,7 @@ defmodule Pinchflat.YtDlp.Media do
#
# These don't fail if duration or aspect_ratio are missing
# due to Elixir's comparison semantics
- response["duration"] <= 60 && response["aspect_ratio"] < 0.8
+ response["duration"] <= 180 && response["aspect_ratio"] <= 0.85
end
end
@@ -136,6 +167,16 @@ defmodule Pinchflat.YtDlp.Media do
defp parse_uploaded_at(%{"upload_date" => nil}), do: nil
defp parse_uploaded_at(response), do: MetadataFileHelpers.parse_upload_date(response["upload_date"])
+ defp parse_downloadable_status(response) do
+ case response["live_status"] do
+ status when status in ["is_live", "is_upcoming", "post_live"] -> {:ok, :ignorable}
+ status when status in ["was_live", "not_live"] -> {:ok, :downloadable}
+ # This preserves my tenuous support for non-youtube sources.
+ nil -> {:ok, :downloadable}
+ _ -> {:error, "Unknown live status: #{response["live_status"]}"}
+ end
+ end
+
defp backend_runner do
# This approach lets us mock the command for testing
Application.get_env(:pinchflat, :yt_dlp_runner)
diff --git a/lib/pinchflat/yt_dlp/media_collection.ex b/lib/pinchflat/yt_dlp/media_collection.ex
index 7e397ab..aa4abb3 100644
--- a/lib/pinchflat/yt_dlp/media_collection.ex
+++ b/lib/pinchflat/yt_dlp/media_collection.ex
@@ -11,29 +11,34 @@ defmodule Pinchflat.YtDlp.MediaCollection do
@doc """
Returns a list of maps representing the media in the collection.
+ Optionally takes a list of additional command options to pass to yt-dlp
+ or configuration-related options to pass to the runner.
- Options:
+ Runner Options:
- :file_listener_handler - a function that will be called with the path to the
file that will be written to when yt-dlp is done. This is useful for
setting up a file watcher to know when the file is ready to be read.
+ - :use_cookies - whether or not to use user-provided cookies when fetching the media details
Returns {:ok, [map()]} | {:error, any, ...}.
"""
- def get_media_attributes_for_collection(url, addl_opts \\ []) do
- runner = Application.get_env(:pinchflat, :yt_dlp_runner)
+ def get_media_attributes_for_collection(url, command_opts \\ [], addl_opts \\ []) do
# `ignore_no_formats_error` is necessary because yt-dlp will error out if
# the first video has not released yet (ie: is a premier). We don't care about
# available formats since we're just getting the media details
- command_opts = [:simulate, :skip_download, :ignore_no_formats_error, :no_warnings]
+ all_command_opts = [:simulate, :skip_download, :ignore_no_formats_error, :no_warnings] ++ command_opts
+ use_cookies = Keyword.get(addl_opts, :use_cookies, false)
output_template = YtDlpMedia.indexing_output_template()
output_filepath = FilesystemUtils.generate_metadata_tmpfile(:json)
file_listener_handler = Keyword.get(addl_opts, :file_listener_handler, false)
+ runner_opts = [output_filepath: output_filepath, use_cookies: use_cookies]
+ action = :get_media_attributes_for_collection
if file_listener_handler do
file_listener_handler.(output_filepath)
end
- case runner.run(url, command_opts, output_template, output_filepath: output_filepath) do
+ case backend_runner().run(url, action, all_command_opts, output_template, runner_opts) do
{:ok, output} ->
parsed_lines =
output
@@ -64,7 +69,7 @@ defmodule Pinchflat.YtDlp.MediaCollection do
Returns {:ok, map()} | {:error, any, ...}.
"""
- def get_source_details(source_url, addl_opts \\ []) do
+ def get_source_details(source_url, command_opts \\ [], addl_opts \\ []) do
# `ignore_no_formats_error` is necessary because yt-dlp will error out if
# the first video has not released yet (ie: is a premier). We don't care about
# available formats since we're just getting the source details
@@ -75,13 +80,15 @@ defmodule Pinchflat.YtDlp.MediaCollection do
playlist_end: 1
]
- command_opts = default_opts ++ addl_opts
+ all_command_opts = default_opts ++ command_opts
output_template = "%(.{channel,channel_id,playlist_id,playlist_title,filename})j"
+ action = :get_source_details
- with {:ok, output} <- backend_runner().run(source_url, command_opts, output_template),
+ with {:ok, output} <- backend_runner().run(source_url, action, all_command_opts, output_template, addl_opts),
{:ok, parsed_json} <- Phoenix.json_library().decode(output) do
{:ok, format_source_details(parsed_json)}
else
+ {:error, %Jason.DecodeError{}} -> {:error, "Error decoding JSON response"}
err -> err
end
end
@@ -99,13 +106,25 @@ defmodule Pinchflat.YtDlp.MediaCollection do
as a compressed blob for possible future use. That's why it's not getting formatted like
`get_source_details/1`
+ ! IMPORTANT ! - you'll always want to set `playlist_items: int` in `addl_opts`.
+ This is great if you want to also return details about the videos in the playlists,
+ but it should be set in all cases to not over-fetch data.
+ For channels you should usually set this to 0 since channels return all the
+ metadata we need without needing to fetch the videos. On the other hand, playlists
+ don't return very useful images so you can set this to 1 to get the first video's
+ images, for instance.
+
Returns {:ok, map()} | {:error, any, ...}.
"""
- def get_source_metadata(source_url, addl_opts \\ []) do
- opts = [playlist_items: 0] ++ addl_opts
- output_template = "playlist:%()j"
+ def get_source_metadata(source_url, command_opts, addl_opts \\ []) do
+ # This only validates that the `playlist_items` key is present. It's otherwise unused
+ _playlist_items = Keyword.fetch!(command_opts, :playlist_items)
- with {:ok, output} <- backend_runner().run(source_url, opts, output_template),
+ all_command_opts = [:skip_download] ++ command_opts
+ output_template = "playlist:%()j"
+ action = :get_source_metadata
+
+ with {:ok, output} <- backend_runner().run(source_url, action, all_command_opts, output_template, addl_opts),
{:ok, parsed_json} <- Phoenix.json_library().decode(output) do
{:ok, parsed_json}
else
diff --git a/lib/pinchflat/yt_dlp/update_worker.ex b/lib/pinchflat/yt_dlp/update_worker.ex
new file mode 100644
index 0000000..2d9b43f
--- /dev/null
+++ b/lib/pinchflat/yt_dlp/update_worker.ex
@@ -0,0 +1,44 @@
+defmodule Pinchflat.YtDlp.UpdateWorker do
+ @moduledoc false
+
+ use Oban.Worker,
+ queue: :local_data,
+ tags: ["local_data"]
+
+ require Logger
+
+ alias __MODULE__
+ alias Pinchflat.Settings
+
+ @doc """
+ Starts the yt-dlp update worker. Does not attach it to a task like `kickoff_with_task/2`
+
+ Returns {:ok, %Oban.Job{}} | {:error, %Ecto.Changeset{}}
+ """
+ def kickoff do
+ Oban.insert(UpdateWorker.new(%{}))
+ end
+
+ @doc """
+ Updates yt-dlp and saves the version to the settings.
+
+ This worker is scheduled to run via the Oban Cron plugin as well as on app boot.
+
+ Returns :ok
+ """
+ @impl Oban.Worker
+ def perform(%Oban.Job{}) do
+ Logger.info("Updating yt-dlp")
+
+ yt_dlp_runner().update()
+
+ {:ok, yt_dlp_version} = yt_dlp_runner().version()
+ Settings.set(yt_dlp_version: yt_dlp_version)
+
+ :ok
+ end
+
+ defp yt_dlp_runner do
+ Application.get_env(:pinchflat, :yt_dlp_runner)
+ end
+end
diff --git a/lib/pinchflat/yt_dlp/yt_dlp_command_runner.ex b/lib/pinchflat/yt_dlp/yt_dlp_command_runner.ex
index 9b46a32..e5c770e 100644
--- a/lib/pinchflat/yt_dlp/yt_dlp_command_runner.ex
+++ b/lib/pinchflat/yt_dlp/yt_dlp_command_runner.ex
@@ -6,7 +6,8 @@ defmodule Pinchflat.YtDlp.YtDlpCommandRunner do
yt-dlp command.
"""
- @callback run(binary(), keyword(), binary()) :: {:ok, binary()} | {:error, binary(), integer()}
- @callback run(binary(), keyword(), binary(), keyword()) :: {:ok, binary()} | {:error, binary(), integer()}
+ @callback run(binary(), atom(), keyword(), binary()) :: {:ok, binary()} | {:error, binary(), integer()}
+ @callback run(binary(), atom(), keyword(), binary(), keyword()) :: {:ok, binary()} | {:error, binary(), integer()}
@callback version() :: {:ok, binary()} | {:error, binary()}
+ @callback update() :: {:ok, binary()} | {:error, binary()}
end
diff --git a/lib/pinchflat_web.ex b/lib/pinchflat_web.ex
index ef58d00..9401e3b 100644
--- a/lib/pinchflat_web.ex
+++ b/lib/pinchflat_web.ex
@@ -43,7 +43,7 @@ defmodule PinchflatWeb do
layouts: [html: PinchflatWeb.Layouts]
import Plug.Conn
- import PinchflatWeb.Gettext
+ use Gettext, backend: PinchflatWeb.Gettext
alias Pinchflat.Settings
alias PinchflatWeb.Layouts
@@ -94,12 +94,13 @@ defmodule PinchflatWeb do
# HTML escaping functionality
import Phoenix.HTML
# Core UI components and translation
- import PinchflatWeb.Gettext
+ use Gettext, backend: PinchflatWeb.Gettext
import PinchflatWeb.CoreComponents
import PinchflatWeb.CustomComponents.TabComponents
import PinchflatWeb.CustomComponents.TextComponents
import PinchflatWeb.CustomComponents.TableComponents
import PinchflatWeb.CustomComponents.ButtonComponents
+ import Pinchflat.Utils.StringUtils, only: [double_brace: 1]
alias Pinchflat.Settings
alias Pinchflat.Utils.StringUtils
diff --git a/lib/pinchflat_web/components/core_components.ex b/lib/pinchflat_web/components/core_components.ex
index 58f785c..37af104 100644
--- a/lib/pinchflat_web/components/core_components.ex
+++ b/lib/pinchflat_web/components/core_components.ex
@@ -15,10 +15,10 @@ defmodule PinchflatWeb.CoreComponents do
Icons are provided by [heroicons](https://heroicons.com). See `icon/1` for usage.
"""
use Phoenix.Component, global_prefixes: ~w(x-)
-
- import PinchflatWeb.Gettext
+ use Gettext, backend: PinchflatWeb.Gettext
alias Phoenix.LiveView.JS
+ alias PinchflatWeb.CustomComponents.TextComponents
@doc """
Renders a modal.
@@ -81,7 +81,7 @@ defmodule PinchflatWeb.CoreComponents do
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
@@ -125,9 +125,9 @@ defmodule PinchflatWeb.CoreComponents do
]}>
- <%= @title %>
+ {@title}
- <%= msg %>
+ {msg}
- <%= render_slot(@inner_block, f) %>
+ {render_slot(@inner_block, f)}
- <%= render_slot(action, f) %>
+ {render_slot(action, f)}
"""
@@ -296,11 +296,11 @@ defmodule PinchflatWeb.CoreComponents do
class={["rounded focus:ring-0", @inputclass]}
{@rest}
/>
- <%= @label %>
- <%= @label_suffix %>
+ {@label}
+ {@label_suffix}
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
end
@@ -309,7 +309,7 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
<.label for={@id}>
- <%= @label %>
<%= @label_suffix %>
+ {@label}
{@label_suffix}
@@ -322,12 +322,12 @@ defmodule PinchflatWeb.CoreComponents do
class={["rounded focus:ring-offset-0 ring-offset-0 focus:ring-0 h-5 w-5 ", @inputclass]}
/>
- <%= option_name %>
+ {option_name}
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
end
@@ -339,14 +339,15 @@ defmodule PinchflatWeb.CoreComponents do
end)
~H"""
-
- <.label for={@id}>
- <%= @label %>
-
<%= @label_suffix %>
+
+ <.label :if={@label} for={@id}>
+ {@label}
+
{@label_suffix}
-
+
-
+ <%!-- This triggers a `change` event on the hidden input when the toggle is clicked --%>
+
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
@@ -368,7 +369,7 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
<.label :if={@label} for={@id}>
- <%= @label %>
<%= @label_suffix %>
+ {@label}
{@label_suffix}
- <%= @prompt %>
- <%= Phoenix.HTML.Form.options_for_select(@options, @value) %>
+ {@prompt}
+ {Phoenix.HTML.Form.options_for_select(@options, @value)}
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
end
@@ -398,7 +399,7 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
<.label for={@id}>
- <%= @label %><%= @label_suffix %>
+ {@label}{@label_suffix}
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
end
@@ -423,7 +424,7 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
<.label for={@id}>
- <%= @label %>
<%= @label_suffix %>
+ {@label}
{@label_suffix}
- <%= render_slot(@input_append) %>
+ {render_slot(@input_append)}
- <.help :if={@help}><%= if @html_help, do: Phoenix.HTML.raw(@help), else: @help %>
- <.error :for={msg <- @errors}><%= msg %>
+ <.help :if={@help}>{if @html_help, do: Phoenix.HTML.raw(@help), else: @help}
+ <.error :for={msg <- @errors}>{msg}
"""
end
@@ -456,7 +457,7 @@ defmodule PinchflatWeb.CoreComponents do
def help(assigns) do
~H"""
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -470,7 +471,7 @@ defmodule PinchflatWeb.CoreComponents do
def label(assigns) do
~H"""
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -484,7 +485,7 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
<.icon name="hero-exclamation-circle-mini" class="mt-0.5 h-5 w-5 flex-none" />
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -503,13 +504,13 @@ defmodule PinchflatWeb.CoreComponents do
"""
end
@@ -549,9 +550,9 @@ defmodule PinchflatWeb.CoreComponents do
- <%= col[:label] %>
+ {col[:label]}
- <%= gettext("Actions") %>
+ {gettext("Actions")}
@@ -569,7 +570,7 @@ defmodule PinchflatWeb.CoreComponents do
- <%= render_slot(col, @row_item.(row)) %>
+ {render_slot(col, @row_item.(row))}
@@ -577,7 +578,7 @@ defmodule PinchflatWeb.CoreComponents do
- <%= render_slot(action, @row_item.(row)) %>
+ {render_slot(action, @row_item.(row))}
@@ -606,8 +607,8 @@ defmodule PinchflatWeb.CoreComponents do
-
<%= item.title %>
- <%= render_slot(item) %>
+ {item.title}
+ {render_slot(item)}
@@ -642,9 +643,13 @@ defmodule PinchflatWeb.CoreComponents do
~H"""
- <%= k %>:
+ {k}:
- <%= v %>
+ <%= if is_binary(v) && URI.parse(v).scheme && URI.parse(v).scheme =~ "http" do %>
+ {v}
+ <% else %>
+ {v}
+ <% end %>
@@ -666,7 +671,7 @@ defmodule PinchflatWeb.CoreComponents do
<.link href={@href} class="text-sm font-semibold leading-6 text-zinc-900 hover:text-zinc-700">
<.icon name="hero-arrow-left-solid" class="h-3 w-3" />
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
@@ -694,7 +699,7 @@ defmodule PinchflatWeb.CoreComponents do
attr :class, :string, default: nil
attr :rest, :global
- def icon(%{name: "hero-" <> _} = assigns) do
+ def icon(assigns) do
~H"""
"""
diff --git a/lib/pinchflat_web/components/custom_components/button_components.ex b/lib/pinchflat_web/components/custom_components/button_components.ex
index 61a8ed1..95f16cf 100644
--- a/lib/pinchflat_web/components/custom_components/button_components.ex
+++ b/lib/pinchflat_web/components/custom_components/button_components.ex
@@ -3,6 +3,7 @@ defmodule PinchflatWeb.CustomComponents.ButtonComponents do
use Phoenix.Component, global_prefixes: ~w(x-)
alias PinchflatWeb.CoreComponents
+ alias PinchflatWeb.CustomComponents.TextComponents
@doc """
Render a button
@@ -26,7 +27,7 @@ defmodule PinchflatWeb.CustomComponents.ButtonComponents do
~H"""
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -66,7 +67,7 @@ defmodule PinchflatWeb.CustomComponents.ButtonComponents do
"font-medium text-white hover:bg-opacity-95"
]}
>
- <%= @text %>
+ {@text}
- <%= render_slot(option) %>
+ {render_slot(option)}
@@ -104,7 +105,7 @@ defmodule PinchflatWeb.CustomComponents.ButtonComponents do
def icon_button(assigns) do
~H"""
-
+
-
-
-
- <%= @tooltip %>
-
-
+
"""
end
end
diff --git a/lib/pinchflat_web/components/custom_components/tab_components.ex b/lib/pinchflat_web/components/custom_components/tab_components.ex
index 56cfe2a..e566b58 100644
--- a/lib/pinchflat_web/components/custom_components/tab_components.ex
+++ b/lib/pinchflat_web/components/custom_components/tab_components.ex
@@ -34,16 +34,16 @@ defmodule PinchflatWeb.CustomComponents.TabComponents do
x-bind:class={"openTab === '#{tab.id}' ? activeClasses : inactiveClasses"}
class="border-b-2 py-4 w-full sm:w-fit text-sm font-medium hover:text-meta-5 md:text-base"
>
- <%= tab.title %>
+ {tab.title}
- <%= render_slot(@tab_append) %>
+ {render_slot(@tab_append)}
-
+
- <%= render_slot(tab) %>
+ {render_slot(tab)}
diff --git a/lib/pinchflat_web/components/custom_components/table_components.ex b/lib/pinchflat_web/components/custom_components/table_components.ex
index 464de2e..16534a0 100644
--- a/lib/pinchflat_web/components/custom_components/table_components.ex
+++ b/lib/pinchflat_web/components/custom_components/table_components.ex
@@ -16,6 +16,8 @@ defmodule PinchflatWeb.CustomComponents.TableComponents do
"""
attr :rows, :list, required: true
attr :table_class, :string, default: ""
+ attr :sort_key, :string, default: nil
+ attr :sort_direction, :string, default: nil
attr :row_item, :any,
default: &Function.identity/1,
@@ -24,6 +26,7 @@ defmodule PinchflatWeb.CustomComponents.TableComponents do
slot :col, required: true do
attr :label, :string
attr :class, :string
+ attr :sort_key, :string
end
def table(assigns) do
@@ -31,8 +34,20 @@ defmodule PinchflatWeb.CustomComponents.TableComponents do
-
- <%= col[:label] %>
+
+
+ {col[:label]}
+ <.icon
+ :if={to_string(@sort_key) == col[:sort_key]}
+ name={if @sort_direction == :asc, do: "hero-chevron-up", else: "hero-chevron-down"}
+ class="w-3 h-3 mt-2 ml-1 absolute"
+ />
+
@@ -41,11 +56,11 @@ defmodule PinchflatWeb.CustomComponents.TableComponents do
- <%= render_slot(col, @row_item.(row)) %>
+ {render_slot(col, @row_item.(row))}
@@ -70,9 +85,9 @@ defmodule PinchflatWeb.CustomComponents.TableComponents do
= @total_pages && "cursor-not-allowed"
]}
phx-click={@page_number != @total_pages && "page_change"}
phx-value-direction="inc"
diff --git a/lib/pinchflat_web/components/custom_components/text_components.ex b/lib/pinchflat_web/components/custom_components/text_components.ex
index e5cc4eb..d6762ce 100644
--- a/lib/pinchflat_web/components/custom_components/text_components.ex
+++ b/lib/pinchflat_web/components/custom_components/text_components.ex
@@ -2,6 +2,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
@moduledoc false
use Phoenix.Component
+ alias Pinchflat.Utils.NumberUtils
alias PinchflatWeb.CoreComponents
@doc """
@@ -12,7 +13,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
def inline_code(assigns) do
~H"""
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -26,7 +27,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
def inline_link(assigns) do
~H"""
<.link href={@href} target="_blank" class="text-blue-500 hover:text-blue-300">
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -41,7 +42,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
def subtle_link(assigns) do
~H"""
<.link href={@href} target={@target} class="underline decoration-bodydark decoration-1 hover:decoration-white">
- <%= render_slot(@inner_block) %>
+ {render_slot(@inner_block)}
"""
end
@@ -62,20 +63,47 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
end
@doc """
- Renders a block of text with each line broken into a separate span.
+ Renders a block of text with each line broken into a separate span and links highlighted.
"""
attr :text, :string, required: true
- def break_on_newline(assigns) do
- broken_text =
- assigns.text
- |> String.split("\n", trim: false)
- |> Enum.intersperse(Phoenix.HTML.Tag.tag(:span, class: "inline-block mt-2"))
+ def render_description(assigns) do
+ formatted_text =
+ Regex.split(~r{https?://\S+}, assigns.text, include_captures: true)
+ |> Enum.map(fn
+ "http" <> _ = url -> {:url, url}
+ text -> Regex.split(~r{\n}, text, include_captures: true, trim: true)
+ end)
- assigns = Map.put(assigns, :text, broken_text)
+ assigns = Map.put(assigns, :text, formatted_text)
~H"""
- <%= @text %>
+
+ <.rendered_description_line :for={line <- @text} content={line} />
+
+ """
+ end
+
+ defp rendered_description_line(%{content: {:url, url}} = assigns) do
+ assigns = Map.put(assigns, :url, url)
+
+ ~H"""
+
+ {@url}
+
+ """
+ end
+
+ defp rendered_description_line(%{content: list_of_content} = assigns) do
+ assigns = Map.put(assigns, :list_of_content, list_of_content)
+
+ ~H"""
+
+ {inner_content}
+
"""
end
@@ -91,7 +119,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
assigns = Map.put(assigns, :timezone, timezone)
~H"""
- <%= Calendar.strftime(Timex.Timezone.convert(@datetime, @timezone), @format) %>
+ {Calendar.strftime(Timex.Timezone.convert(@datetime, @timezone), @format)}
"""
end
@@ -102,7 +130,7 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
def localized_number(assigns) do
~H"""
- <%= @number %>
+ {@number}
"""
end
@@ -115,7 +143,83 @@ defmodule PinchflatWeb.CustomComponents.TextComponents do
def pluralize(assigns) do
~H"""
- <%= @word %><%= if @count == 1, do: "", else: @suffix %>
+ {@word}{if @count == 1, do: "", else: @suffix}
+ """
+ end
+
+ @doc """
+ Renders a human-readable byte size
+ """
+
+ attr :byte_size, :integer, required: true
+
+ def readable_filesize(assigns) do
+ {num, suffix} = NumberUtils.human_byte_size(assigns.byte_size, precision: 2)
+
+ assigns =
+ Map.merge(assigns, %{
+ num: num,
+ suffix: suffix
+ })
+
+ ~H"""
+ <.localized_number number={@num} /> {@suffix}
+ """
+ end
+
+ @doc """
+ Renders a tooltip with the given content
+ """
+
+ attr :tooltip, :string, required: true
+ attr :position, :string, default: ""
+ attr :tooltip_class, :any, default: ""
+ attr :tooltip_arrow_class, :any, default: ""
+ slot :inner_block
+
+ def tooltip(%{position: "bottom-right"} = assigns) do
+ ~H"""
+ <.tooltip tooltip={@tooltip} tooltip_class={@tooltip_class} tooltip_arrow_class={["-top-1", @tooltip_arrow_class]}>
+ {render_slot(@inner_block)}
+
+ """
+ end
+
+ def tooltip(%{position: "bottom"} = assigns) do
+ ~H"""
+ <.tooltip
+ tooltip={@tooltip}
+ tooltip_class={["left-1/2 -translate-x-1/2", @tooltip_class]}
+ tooltip_arrow_class={["-top-1 left-1/2 -translate-x-1/2", @tooltip_arrow_class]}
+ >
+ {render_slot(@inner_block)}
+
+ """
+ end
+
+ def tooltip(assigns) do
+ ~H"""
+
+
+ {render_slot(@inner_block)}
+
+
+
"""
end
end
diff --git a/lib/pinchflat_web/components/layouts.ex b/lib/pinchflat_web/components/layouts.ex
index 03da41e..7312c30 100644
--- a/lib/pinchflat_web/components/layouts.ex
+++ b/lib/pinchflat_web/components/layouts.ex
@@ -15,11 +15,12 @@ defmodule PinchflatWeb.Layouts do
attr :text, :string, required: true
attr :href, :any, required: true
attr :target, :any, default: "_self"
+ attr :icon_class, :string, default: ""
def sidebar_item(assigns) do
~H"""
- <.sidebar_link icon={@icon} text={@text} href={@href} target={@target} />
+ <.sidebar_link icon={@icon} text={@text} href={@href} target={@target} icon_class={@icon_class} />
"""
end
@@ -61,10 +62,10 @@ defmodule PinchflatWeb.Layouts do
x-on:click="selected = !selected"
>
- <.icon name={@icon} /> <%= @text %>
+ <.icon name={@icon} /> {@text}
- <.icon name="hero-chevron-up" x-bind:class="{ 'rotate-180': selected }" />
+ <.icon name="hero-chevron-down" x-bind:class="{ 'rotate-180': selected }" />
@@ -89,6 +90,7 @@ defmodule PinchflatWeb.Layouts do
attr :href, :any, required: true
attr :target, :any, default: "_self"
attr :class, :string, default: ""
+ attr :icon_class, :string, default: ""
def sidebar_link(assigns) do
~H"""
@@ -103,7 +105,7 @@ defmodule PinchflatWeb.Layouts do
@class
]}
>
- <.icon :if={@icon} name={@icon} /> <%= @text %>
+ <.icon :if={@icon} name={@icon} class={@icon_class} /> {@text}
"""
end
diff --git a/lib/pinchflat_web/components/layouts/app.html.heex b/lib/pinchflat_web/components/layouts/app.html.heex
index 63762ff..491035c 100644
--- a/lib/pinchflat_web/components/layouts/app.html.heex
+++ b/lib/pinchflat_web/components/layouts/app.html.heex
@@ -6,7 +6,7 @@
<.flash_group flash={@flash} />
- <%= @inner_content %>
+ {@inner_content}
diff --git a/lib/pinchflat_web/components/layouts/onboarding.html.heex b/lib/pinchflat_web/components/layouts/onboarding.html.heex
index b2b4e97..e7f24d9 100644
--- a/lib/pinchflat_web/components/layouts/onboarding.html.heex
+++ b/lib/pinchflat_web/components/layouts/onboarding.html.heex
@@ -11,7 +11,7 @@
<.flash_group flash={@flash} />
- <%= @inner_content %>
+ {@inner_content}
diff --git a/lib/pinchflat_web/components/layouts/partials/sidebar.html.heex b/lib/pinchflat_web/components/layouts/partials/sidebar.html.heex
index 2d7ae0d..e7f89a3 100644
--- a/lib/pinchflat_web/components/layouts/partials/sidebar.html.heex
+++ b/lib/pinchflat_web/components/layouts/partials/sidebar.html.heex
@@ -8,7 +8,7 @@
>
-
+
@@ -47,8 +47,10 @@
text="Docs"
target="_blank"
href="https://github.com/kieraneglin/pinchflat/wiki"
+ icon_class="scale-110"
/>
- <.sidebar_item icon="hero-cog" text="Github" target="_blank" href="https://github.com/kieraneglin/pinchflat" />
+ <.sidebar_item icon="si-github" text="Github" target="_blank" href="https://github.com/kieraneglin/pinchflat" />
+ <.sidebar_item icon="si-discord" text="Discord" target="_blank" href="https://discord.gg/j7T6dCuwU4" />
- <.icon name="hero-currency-dollar" /> Donate
+ <.icon name="hero-currency-dollar" class="scale-110" /> Donate
@@ -67,7 +69,7 @@
class="group relative flex items-center gap-2.5 px-4 pt-2 text-sm"
x-on:click={"markVersionAsSeen('#{Application.spec(:pinchflat)[:vsn]}')"}
>
- Pinchflat <%= Application.spec(:pinchflat)[:vsn] %>
+ Pinchflat {Application.spec(:pinchflat)[:vsn]}
- yt-dlp <%= Settings.get!(:yt_dlp_version) %>
+ yt-dlp {Settings.get!(:yt_dlp_version)}
diff --git a/lib/pinchflat_web/components/layouts/partials/upgrade_button_live.ex b/lib/pinchflat_web/components/layouts/partials/upgrade_button_live.ex
index b120191..9641564 100644
--- a/lib/pinchflat_web/components/layouts/partials/upgrade_button_live.ex
+++ b/lib/pinchflat_web/components/layouts/partials/upgrade_button_live.ex
@@ -3,7 +3,7 @@ defmodule Pinchflat.UpgradeButtonLive do
def render(assigns) do
~H"""
-
diff --git a/lib/pinchflat_web/components/layouts/partials/upgrade_modal.heex b/lib/pinchflat_web/components/layouts/partials/upgrade_modal.heex
index 6422618..cbf5bc4 100644
--- a/lib/pinchflat_web/components/layouts/partials/upgrade_modal.heex
+++ b/lib/pinchflat_web/components/layouts/partials/upgrade_modal.heex
@@ -25,6 +25,6 @@
into the text box and press the button.
- <%= live_render(@conn, Pinchflat.UpgradeButtonLive) %>
+ {live_render(@conn, Pinchflat.UpgradeButtonLive)}
diff --git a/lib/pinchflat_web/components/layouts/root.html.heex b/lib/pinchflat_web/components/layouts/root.html.heex
index ba87086..4068d74 100644
--- a/lib/pinchflat_web/components/layouts/root.html.heex
+++ b/lib/pinchflat_web/components/layouts/root.html.heex
@@ -5,7 +5,7 @@
<.live_title>
- <%= assigns[:page_title] || "Pinchflat" %>
+ {assigns[:page_title] || "Pinchflat"}
@@ -21,7 +21,7 @@
class="dark text-bodydark bg-boxdark-2"
data-socket-path={Path.join(Application.get_env(:pinchflat, :base_route_path), "/live")}
>
- <%= @inner_content %>
+ {@inner_content}
<.donate_modal conn={@conn} />
diff --git a/lib/pinchflat_web/controllers/error_html.ex b/lib/pinchflat_web/controllers/error_html.ex
index 7fdeb0a..6a6e689 100644
--- a/lib/pinchflat_web/controllers/error_html.ex
+++ b/lib/pinchflat_web/controllers/error_html.ex
@@ -1,14 +1,7 @@
defmodule PinchflatWeb.ErrorHTML do
use PinchflatWeb, :html
- # If you want to customize your error pages,
- # uncomment the embed_templates/1 call below
- # and add pages to the error directory:
- #
- # * lib/pinchflat_web/controllers/error_html/404.html.heex
- # * lib/pinchflat_web/controllers/error_html/500.html.heex
- #
- # embed_templates "error_html/*"
+ embed_templates "error_html/*"
# The default is to render a plain text page based on
# the template name. For example, "404.html" becomes
diff --git a/lib/pinchflat_web/controllers/error_html/404.html.heex b/lib/pinchflat_web/controllers/error_html/404.html.heex
new file mode 100644
index 0000000..d49f5f5
--- /dev/null
+++ b/lib/pinchflat_web/controllers/error_html/404.html.heex
@@ -0,0 +1,3 @@
+
diff --git a/lib/pinchflat_web/controllers/error_html/500.html.heex b/lib/pinchflat_web/controllers/error_html/500.html.heex
new file mode 100644
index 0000000..6d34195
--- /dev/null
+++ b/lib/pinchflat_web/controllers/error_html/500.html.heex
@@ -0,0 +1,29 @@
+
+ Internal Server Error
+
+ This shouldn't happen! Please make a
+ <.inline_link href="https://github.com/kieraneglin/pinchflat/issues/new/choose">GitHub issue
+ with the following information:
+
+
+
+ What you were doing when you saw this page
+
+ Your system details and logs from
+ <.inline_link href={~p"/app_info"}>app info
+
+ All the information in the textarea below (use select all + copy)
+
+
+
diff --git a/lib/pinchflat_web/controllers/media_items/media_item_html/actions_dropdown.html.heex b/lib/pinchflat_web/controllers/media_items/media_item_html/actions_dropdown.html.heex
index fcd4589..cd40fb7 100644
--- a/lib/pinchflat_web/controllers/media_items/media_item_html/actions_dropdown.html.heex
+++ b/lib/pinchflat_web/controllers/media_items/media_item_html/actions_dropdown.html.heex
@@ -2,7 +2,7 @@
<:option>
copied = true,
() => copied = false
)
diff --git a/lib/pinchflat_web/controllers/media_items/media_item_html/edit.html.heex b/lib/pinchflat_web/controllers/media_items/media_item_html/edit.html.heex
index 0924a77..0fdac1d 100644
--- a/lib/pinchflat_web/controllers/media_items/media_item_html/edit.html.heex
+++ b/lib/pinchflat_web/controllers/media_items/media_item_html/edit.html.heex
@@ -1,6 +1,6 @@
-
- Editing "<%= StringUtils.truncate(@media_item.title, 35) %>"
+
+ Editing "{@media_item.title}"
diff --git a/lib/pinchflat_web/controllers/media_items/media_item_html/media_preview.heex b/lib/pinchflat_web/controllers/media_items/media_item_html/media_preview.heex
index 5ad0533..6553f51 100644
--- a/lib/pinchflat_web/controllers/media_items/media_item_html/media_preview.heex
+++ b/lib/pinchflat_web/controllers/media_items/media_item_html/media_preview.heex
@@ -1,13 +1,13 @@
<%= if media_type(@media_item) == :video do %>
-
-
+
+
Your browser does not support the video element.
<% end %>
<%= if media_type(@media_item) == :audio do %>
-
+
Your browser does not support the audio element.
<% end %>
diff --git a/lib/pinchflat_web/controllers/media_items/media_item_html/show.html.heex b/lib/pinchflat_web/controllers/media_items/media_item_html/show.html.heex
index e2442c6..f531731 100644
--- a/lib/pinchflat_web/controllers/media_items/media_item_html/show.html.heex
+++ b/lib/pinchflat_web/controllers/media_items/media_item_html/show.html.heex
@@ -1,10 +1,10 @@
-
+
<.link href={~p"/sources/#{@media_item.source_id}"}>
<.icon name="hero-arrow-left" class="w-10 h-10 hover:dark:text-white" />
-
- <%= StringUtils.truncate(@media_item.title, 35) %>
+
+ {@media_item.title}
@@ -16,7 +16,7 @@
-
+
<.tabbed_layout>
<:tab_append>
@@ -24,53 +24,67 @@
<:tab title="Media" id="media">
-
+
+
+
+ <.icon name="hero-exclamation-circle-solid" class="text-red-500" />
+
Last Error
+
+ {@media_item.last_error}
+
+
<%= if media_file_exists?(@media_item) do %>
-
+
<.media_preview media_item={@media_item} />
-
- Uploaded: <%= DateTime.to_date(@media_item.uploaded_at) %>
+
+ {@media_item.title}
+ Uploaded: {DateTime.to_date(@media_item.uploaded_at)}
<.subtle_link href={@media_item.original_url} target="_blank">Open Original
or
- <.subtle_link href={~p"/media/#{@media_item.uuid}/stream"} target="_blank">
+ <.subtle_link
+ href={~p"/media/#{@media_item.uuid}/stream?v=#{DateTime.to_unix(@media_item.updated_at)}"}
+ target="_blank"
+ >
Open Local Stream
- <.break_on_newline text={@media_item.description} />
+ <.render_description text={@media_item.description} />
<% end %>
- Raw Attributes
- Source:
- <.subtle_link href={~p"/sources/#{@media_item.source_id}"}>
- <%= @media_item.source.custom_name %>
-
- <.list_items_from_map map={Map.from_struct(@media_item)} />
+ Raw Attributes
+
+ Source:
+ <.subtle_link href={~p"/sources/#{@media_item.source_id}"}>
+ {@media_item.source.custom_name}
+
+ <.list_items_from_map map={Map.from_struct(@media_item)} />
+
<:tab title="Tasks" id="tasks">
<%= if match?([_|_], @media_item.tasks) do %>
- <.table rows={@media_item.tasks} table_class="text-black dark:text-white">
+ <.table rows={@media_item.tasks} table_class="text-white">
<:col :let={task} label="Worker">
- <%= task.job.worker %>
+ {task.job.worker}
<:col :let={task} label="State">
- <%= task.job.state %>
+ {task.job.state}
<:col :let={task} label="Scheduled At">
- <%= Calendar.strftime(task.job.scheduled_at, "%y-%m-%d %I:%M:%S %p %Z") %>
+ <.datetime_in_zone datetime={task.job.scheduled_at} />
<% else %>
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_controller.ex b/lib/pinchflat_web/controllers/media_profiles/media_profile_controller.ex
index 22a4916..923ce7a 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_controller.ex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_controller.ex
@@ -1,24 +1,51 @@
defmodule PinchflatWeb.MediaProfiles.MediaProfileController do
use PinchflatWeb, :controller
use Pinchflat.Sources.SourcesQuery
+ use Pinchflat.Profiles.ProfilesQuery
alias Pinchflat.Repo
alias Pinchflat.Profiles
+ alias Pinchflat.Sources.Source
alias Pinchflat.Profiles.MediaProfile
+ alias Pinchflat.Profiles.MediaProfileDeletionWorker
def index(conn, _params) do
- media_profiles =
- MediaProfile
- |> order_by(asc: :name)
- |> Repo.all()
+ media_profiles_query =
+ from mp in MediaProfile,
+ as: :media_profile,
+ where: is_nil(mp.marked_for_deletion_at),
+ order_by: [asc: mp.name],
+ select: map(mp, ^MediaProfile.__schema__(:fields)),
+ select_merge: %{
+ source_count:
+ subquery(
+ from s in Source,
+ where: s.media_profile_id == parent_as(:media_profile).id,
+ select: count(s.id)
+ )
+ }
- render(conn, :index, media_profiles: media_profiles)
+ render(conn, :index, media_profiles: Repo.all(media_profiles_query))
end
- def new(conn, _params) do
- changeset = Profiles.change_media_profile(%MediaProfile{})
+ def new(conn, params) do
+ # Preload an existing media profile for faster creation
+ cs_struct =
+ case to_string(params["template_id"]) do
+ "" -> %MediaProfile{}
+ template_id -> Repo.get(MediaProfile, template_id) || %MediaProfile{}
+ end
- render(conn, :new, changeset: changeset, layout: get_onboarding_layout())
+ render(conn, :new,
+ layout: get_onboarding_layout(),
+ changeset:
+ Profiles.change_media_profile(%MediaProfile{
+ cs_struct
+ | id: nil,
+ name: nil,
+ marked_for_deletion_at: nil
+ })
+ )
end
def create(conn, %{"media_profile" => media_profile_params}) do
@@ -70,19 +97,15 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileController do
end
def delete(conn, %{"id" => id} = params) do
- delete_files = Map.get(params, "delete_files", false)
+ # This awkward comparison converts the string to a boolean
+ delete_files = Map.get(params, "delete_files", "") == "true"
media_profile = Profiles.get_media_profile!(id)
- {:ok, _media_profile} = Profiles.delete_media_profile(media_profile, delete_files: delete_files)
- flash_message =
- if delete_files do
- "Media profile, its sources, and its files deleted successfully."
- else
- "Media profile and its sources deleted successfully. Files were not deleted."
- end
+ {:ok, _} = Profiles.update_media_profile(media_profile, %{marked_for_deletion_at: DateTime.utc_now()})
+ MediaProfileDeletionWorker.kickoff(media_profile, %{delete_files: delete_files})
conn
- |> put_flash(:info, flash_message)
+ |> put_flash(:info, "Media Profile deletion started. This may take a while to complete.")
|> redirect(to: ~p"/media_profiles")
end
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html.ex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html.ex
index 44ed8da..72beafe 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html.ex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html.ex
@@ -10,6 +10,7 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
"""
attr :changeset, Ecto.Changeset, required: true
attr :action, :string, required: true
+ attr :method, :string, required: true
def media_profile_form(assigns)
@@ -25,6 +26,7 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
[
{"8k", "4320p"},
{"4k", "2160p"},
+ {"1440p", "1440p"},
{"1080p", "1080p"},
{"720p", "720p"},
{"480p", "480p"},
@@ -36,6 +38,7 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
def friendly_sponsorblock_options do
[
{"Disabled (default)", "disabled"},
+ {"Mark Segments as Chapters", "mark"},
{"Remove Segments", "remove"}
]
end
@@ -53,12 +56,24 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
]
end
- def custom_output_template_options do
- %{
+ def media_center_custom_output_template_options do
+ [
+ season_by_year__episode_by_date: "
Season YYYY/sYYYYeMMDD",
+ season_by_year__episode_by_date_and_index:
+ "same as the above but it handles dates better.
This is the recommended option ",
+ static_season__episode_by_index:
+ "
Season 1/s01eXX where
XX is the video's position in the playlist. Only recommended for playlists (not channels) that don't change",
+ static_season__episode_by_date:
+ "
Season 1/s01eYYMMDD. Recommended for playlists that might change or where order isn't important"
+ ]
+ end
+
+ def other_custom_output_template_options do
+ [
upload_day: nil,
upload_month: nil,
upload_year: nil,
- upload_yyyy_mm_dd: "the upload date in the format YYYY-MM-DD",
+ upload_yyyy_mm_dd: "the upload date in the format
YYYY-MM-DD",
source_custom_name: "the name of the sources that use this profile",
source_collection_id: "the YouTube ID of the sources that use this profile",
source_collection_name:
@@ -66,10 +81,15 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
source_collection_type: "the collection type of the sources using this profile. Either 'channel' or 'playlist'",
artist_name: "the name of the artist with fallbacks to other uploader fields",
season_from_date: "alias for upload_year",
- season_episode_from_date: "the upload date formatted as sYYYYeMMDD",
+ season_episode_from_date: "the upload date formatted as
sYYYYeMMDD",
season_episode_index_from_date:
- "the upload date formatted as sYYYYeMMDDII where II is an index to prevent date collisions"
- }
+ "the upload date formatted as
sYYYYeMMDDII where
II is an index to prevent date collisions",
+ media_playlist_index:
+ "the place of the media item in the playlist. Do not use with channels. May not work if the playlist is updated",
+ media_item_id: "the ID of the media item in Pinchflat's database",
+ source_id: "the ID of the source in Pinchflat's database",
+ media_profile_id: "the ID of the media profile in Pinchflat's database"
+ ]
end
def common_output_template_options do
@@ -77,7 +97,6 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
id
ext
title
- fulltitle
uploader
channel
upload_date
@@ -99,7 +118,7 @@ defmodule PinchflatWeb.MediaProfiles.MediaProfileHTML do
end
defp media_center_output_template do
- "/shows/{{ source_custom_name }}/Season {{ season_from_date }}/{{ season_episode_index_from_date }} - {{ title }}.{{ ext }}"
+ "/shows/{{ source_custom_name }}/{{ season_by_year__episode_by_date_and_index }} - {{ title }}.{{ ext }}"
end
defp audio_output_template do
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/actions_dropdown.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/actions_dropdown.html.heex
index 29fbe42..e4f42cf 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/actions_dropdown.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/actions_dropdown.html.heex
@@ -2,7 +2,7 @@
<:option>
copied = true,
() => copied = false
)
@@ -11,6 +11,11 @@
<.icon name="hero-check" class="ml-2 h-4 w-4" />
+ <:option>
+ <.link href={~p"/media_profiles/new?template_id=#{@media_profile}"} method="get">
+ Use as Template
+
+
<:option>
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/edit.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/edit.html.heex
index 27459f0..4c0d93f 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/edit.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/edit.html.heex
@@ -2,15 +2,15 @@
<.link href={~p"/media_profiles"}>
<.icon name="hero-arrow-left" class="w-10 h-10 hover:dark:text-white" />
-
- Editing "<%= @media_profile.name %>"
+
+ Editing "{@media_profile.name}"
- <.media_profile_form changeset={@changeset} action={~p"/media_profiles/#{@media_profile}"} />
+ <.media_profile_form changeset={@changeset} action={~p"/media_profiles/#{@media_profile}"} method="patch" />
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/index.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/index.html.heex
index e3a2dee..1cbd719 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/index.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/index.html.heex
@@ -10,18 +10,22 @@
-
<.table rows={@media_profiles} table_class="text-black dark:text-white">
- <:col :let={media_profile} label="Name">
+ <:col :let={media_profile} label="Name" class="truncate max-w-xs">
<.subtle_link href={~p"/media_profiles/#{media_profile.id}"}>
- <%= media_profile.name %>
+ {media_profile.name}
<:col :let={media_profile} label="Preferred Resolution">
- <%= media_profile.preferred_resolution %>
+ {media_profile.preferred_resolution}
+
+ <:col :let={media_profile} label="Sources">
+ <.subtle_link href={~p"/media_profiles/#{media_profile.id}/#tab-sources"}>
+ <.localized_number number={media_profile.source_count} />
+
<:col :let={media_profile} label="" class="flex justify-end">
<.icon_link href={~p"/media_profiles/#{media_profile.id}/edit"} icon="hero-pencil-square" class="mr-4" />
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/media_profile_form.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/media_profile_form.html.heex
index c30b3df..618cc7a 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/media_profile_form.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/media_profile_form.html.heex
@@ -1,4 +1,11 @@
-<.simple_form :let={f} for={@changeset} action={@action}>
+<.simple_form
+ :let={f}
+ for={@changeset}
+ action={@action}
+ method={@method}
+ x-data="{ advancedMode: !!JSON.parse(localStorage.getItem('advancedMode')) }"
+ x-init="$watch('advancedMode', value => localStorage.setItem('advancedMode', JSON.stringify(value)))"
+>
<.error :if={@changeset.action}>
Oops, something went wrong! Please check the errors below.
@@ -30,10 +37,14 @@
-
- General Options
-
-
+
+
+ General Options
+
+
+ Editing Mode:
+
+
- <.input
- field={f[:download_auto_subs]}
- type="toggle"
- label="Download Autogenerated Subtitles"
- help="Prefers normal subs but will download autogenerated if needed. Requires 'Download Subtitles' to be enabled"
- x-init="$watch('selectedPreset', p => p && (enabled = presets[p]))"
- />
-
-
<.input
field={f[:embed_subs]}
type="toggle"
label="Embed Subtitles"
- help="Downloads and embeds subtitles in the media file itself, if supported. Uneffected by 'Download Subtitles' (recommended)"
+ help="Downloads and embeds subtitles in the media file itself, if supported. Unaffected by 'Download Subtitles'"
+ x-init="$watch('selectedPreset', p => p && (enabled = presets[p]))"
+ />
+
+
+
+ <.input
+ field={f[:download_auto_subs]}
+ type="toggle"
+ label="Use Autogenerated Subtitles"
+ help="Prefers normal subs with 'Download Subtitles' or 'Embed Subtitles' but will use autogenerated subs if needed."
x-init="$watch('selectedPreset', p => p && (enabled = presets[p]))"
/>
@@ -114,6 +125,16 @@
/>
+
+ <.input
+ field={f[:audio_track]}
+ placeholder="de"
+ type="text"
+ label="Audio Track Language"
+ help="Only works if there are multiple audio tracks. Use either a language code, 'original' for the original audio track, or 'default' for YouTube's preference. Or just leave it blank"
+ />
+
+
Thumbnail Options
@@ -133,7 +154,7 @@
field={f[:embed_thumbnail]}
type="toggle"
label="Embed Thumbnail"
- help="Downloads and embeds thumbnail in the media file itself, if supported. Uneffected by 'Download Thumbnail' (recommended)"
+ help="Downloads and embeds thumbnail in the media file itself, if supported. Unaffected by 'Download Thumbnail' (recommended)"
x-init="$watch('selectedPreset', p => p && (enabled = presets[p]))"
/>
@@ -157,7 +178,7 @@
field={f[:embed_metadata]}
type="toggle"
label="Embed Metadata"
- help="Downloads and embeds metadata in the media file itself, if supported. Uneffected by 'Download Metadata' (recommended)"
+ help="Downloads and embeds metadata in the media file itself, if supported. Unaffected by 'Download Metadata' (recommended)"
x-init="$watch('selectedPreset', p => p && (enabled = presets[p]))"
/>
@@ -203,6 +224,16 @@
/>
+
+ <.input
+ field={f[:media_container]}
+ type="text"
+ label="Media Container"
+ placeholder="mp4"
+ help="Don't change this if you're going to consume media via Plex. Leave blank for default"
+ />
+
+
<.input
field={f[:redownload_delay_days]}
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/new.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/new.html.heex
index 0fe77fe..e301d73 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/new.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/new.html.heex
@@ -8,7 +8,7 @@
- <.media_profile_form changeset={@changeset} action={~p"/media_profiles"} />
+ <.media_profile_form changeset={@changeset} action={~p"/media_profiles"} method="post" />
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/output_template_help.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/output_template_help.html.heex
index 7dd6df2..e17e846 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/output_template_help.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/output_template_help.html.heex
@@ -1,12 +1,14 @@
<%!-- The heex HTML formatter is really struggling with this file - I apologize in advance --%>
Output Template Syntax
-
+
When generating an output template, you have 3 options for syntax:
Liquid-style:
- <.inline_code>/{{ channel }}/{{ title }} - {{ id }}.{{ ext }}
+ <.inline_code>
+ {"/#{double_brace("channel")}/#{double_brace("title")} - #{double_brace("id")}.#{double_brace("ext")}"}
+
yt-dlp-style
@@ -17,7 +19,7 @@
Any bare words:
- <.inline_code>/videos/1080p/{{ id }}.{{ ext }}
+ <.inline_code>{"/videos/1080p/#{double_brace("id")}.#{double_brace("ext")}"}
@@ -25,7 +27,7 @@
any
single-word yt-dlp
option can be used as liquid-style and it's automatically made filepath-safe. For example, the
- <.inline_code>{{ duration }}
+ <.inline_code>{double_brace("duration")}
option is translated to
<.inline_code>%(duration)S
@@ -33,12 +35,14 @@
Major 🔑:
these syntaxes can be mixed and matched freely! I prefer to use liquid-style and bare words
but I'll include yt-dlp-style when I need more control. For example:
- <.inline_code>/1080p/{{ channel }}/{{ title }}-(%(subtitles.en.-1.ext)s).{{ ext }}
+ <.inline_code>
+ {"/1080p/#{double_brace("channel")}/#{double_brace("title")}-(%(subtitles.en.-1.ext)s).#{double_brace("ext")}"}
+
NOTE:
Your template must
- end with an extension option (<.inline_code>.{{ ext }}
+ end with an extension option (<.inline_code>.{double_brace("ext")}
or
<.inline_code>.%(ext)S).
Downloading won't work as expected without it.
@@ -46,7 +50,7 @@
Template Options
-
+
Any single-word yt-dlp
option
@@ -56,17 +60,24 @@
can be used with the curly braced liquid-style syntax.
This is just a list of the most common options as well as some custom aliases
- Custom Aliases
-
-
- <.inline_code>{{ <%= k %> }}
- - <%= v %>
+ Media Center Custom Aliases
+
+
+ <.inline_code>{double_brace(k)}
+ - {html_escape({:safe, v})}
+
+
+ Other Custom Aliases
+
+
+ <.inline_code>{double_brace(k)}
+ - {html_escape({:safe, v})}
Common Options
-
- <.inline_code>{{ <%= opt %> }}
+
+ <.inline_code>{double_brace(opt)}
diff --git a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/show.html.heex b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/show.html.heex
index ef0629f..641296e 100644
--- a/lib/pinchflat_web/controllers/media_profiles/media_profile_html/show.html.heex
+++ b/lib/pinchflat_web/controllers/media_profiles/media_profile_html/show.html.heex
@@ -1,10 +1,10 @@
-
+
<.link href={~p"/media_profiles"}>
<.icon name="hero-arrow-left" class="w-10 h-10 hover:dark:text-white" />
-
- <%= @media_profile.name %>
+
+ {@media_profile.name}
@@ -25,18 +25,20 @@
<:tab title="Media Profile" id="media-profile">
-
Raw Attributes
- <.list_items_from_map map={Map.from_struct(@media_profile)} />
+
+ Raw Attributes
+ <.list_items_from_map map={Map.from_struct(@media_profile)} />
+
<:tab title="Sources" id="sources">
<.table rows={@sources} table_class="text-black dark:text-white">
<:col :let={source} label="Name">
<.subtle_link href={~p"/sources/#{source.id}"}>
- <%= source.custom_name || source.collection_name %>
+ {source.custom_name || source.collection_name}
- <:col :let={source} label="Type"><%= source.collection_type %>
+ <:col :let={source} label="Type">{source.collection_type}
<:col :let={source} label="Should Download?">
<.icon name={if source.download_media, do: "hero-check", else: "hero-x-mark"} />
diff --git a/lib/pinchflat_web/controllers/pages/page_html.ex b/lib/pinchflat_web/controllers/pages/page_html.ex
index c96a6c9..16f7731 100644
--- a/lib/pinchflat_web/controllers/pages/page_html.ex
+++ b/lib/pinchflat_web/controllers/pages/page_html.ex
@@ -1,23 +1,5 @@
defmodule PinchflatWeb.Pages.PageHTML do
use PinchflatWeb, :html
- alias Pinchflat.Utils.NumberUtils
-
embed_templates "page_html/*"
-
- attr :media_filesize, :integer, required: true
-
- def readable_media_filesize(assigns) do
- {num, suffix} = NumberUtils.human_byte_size(assigns.media_filesize, precision: 2)
-
- assigns =
- Map.merge(assigns, %{
- num: num,
- suffix: suffix
- })
-
- ~H"""
- <.localized_number number={@num} /> <%= @suffix %>
- """
- end
end
diff --git a/lib/pinchflat_web/controllers/pages/page_html/history_table_live.ex b/lib/pinchflat_web/controllers/pages/page_html/history_table_live.ex
index 13919ab..6c104aa 100644
--- a/lib/pinchflat_web/controllers/pages/page_html/history_table_live.ex
+++ b/lib/pinchflat_web/controllers/pages/page_html/history_table_live.ex
@@ -28,23 +28,35 @@ defmodule Pinchflat.Pages.HistoryTableLive do
<.table rows={@records} table_class="text-white">
- <:col :let={media_item} label="Title">
- <.subtle_link href={~p"/sources/#{media_item.source_id}/media/#{media_item}"}>
- <%= StringUtils.truncate(media_item.title, 35) %>
-
+ <:col :let={media_item} label="Title" class="max-w-xs">
+
+ <.tooltip
+ :if={media_item.last_error}
+ tooltip={media_item.last_error}
+ position="bottom-right"
+ tooltip_class="w-64"
+ >
+ <.icon name="hero-exclamation-circle-solid" class="text-red-500" />
+
+
+ <.subtle_link href={~p"/sources/#{media_item.source_id}/media/#{media_item.id}"}>
+ {media_item.title}
+
+
+
<:col :let={media_item} label="Upload Date">
- <%= DateTime.to_date(media_item.uploaded_at) %>
+ {DateTime.to_date(media_item.uploaded_at)}
<:col :let={media_item} label="Indexed At">
- <%= format_datetime(media_item.inserted_at) %>
+ {format_datetime(media_item.inserted_at)}
<:col :let={media_item} label="Downloaded At">
- <%= format_datetime(media_item.media_downloaded_at) %>
+ {format_datetime(media_item.media_downloaded_at)}
- <:col :let={media_item} label="Source">
+ <:col :let={media_item} label="Source" class="truncate max-w-xs">
<.subtle_link href={~p"/sources/#{media_item.source_id}"}>
- <%= StringUtils.truncate(media_item.source.custom_name, 35) %>
+ {media_item.source.custom_name}
@@ -56,9 +68,9 @@ defmodule Pinchflat.Pages.HistoryTableLive do
"""
end
- def mount(_params, _session, socket) do
+ def mount(_params, session, socket) do
page = 1
- base_query = generate_base_query()
+ base_query = generate_base_query(session["media_state"])
pagination_attrs = fetch_pagination_attributes(base_query, page)
{:ok, assign(socket, Map.merge(pagination_attrs, %{base_query: base_query}))}
@@ -97,10 +109,17 @@ defmodule Pinchflat.Pages.HistoryTableLive do
|> Repo.preload(:source)
end
- defp generate_base_query do
+ defp generate_base_query("pending") do
MediaQuery.new()
|> MediaQuery.require_assoc(:media_profile)
- |> where(^dynamic(^MediaQuery.downloaded() or ^MediaQuery.pending()))
+ |> where(^dynamic(^MediaQuery.pending()))
+ |> order_by(desc: :id)
+ end
+
+ defp generate_base_query("downloaded") do
+ MediaQuery.new()
+ |> MediaQuery.require_assoc(:media_profile)
+ |> where(^dynamic(^MediaQuery.downloaded()))
|> order_by(desc: :id)
end
diff --git a/lib/pinchflat_web/controllers/pages/page_html/home.html.heex b/lib/pinchflat_web/controllers/pages/page_html/home.html.heex
index 34a99cf..6d3b36f 100644
--- a/lib/pinchflat_web/controllers/pages/page_html/home.html.heex
+++ b/lib/pinchflat_web/controllers/pages/page_html/home.html.heex
@@ -33,7 +33,7 @@
Library Size
- <.readable_media_filesize media_filesize={@media_item_size} />
+ <.readable_filesize byte_size={@media_item_size} />
@@ -41,14 +41,23 @@
Media History
-
- <%= live_render(@conn, Pinchflat.Pages.HistoryTableLive) %>
-
-
-
-
- Active Tasks
-
- <%= live_render(@conn, Pinchflat.Pages.JobTableLive) %>
-
+ <.tabbed_layout>
+ <:tab title="Downloaded" id="downloaded">
+ {live_render(
+ @conn,
+ Pinchflat.Pages.HistoryTableLive,
+ session: %{"media_state" => "downloaded"}
+ )}
+
+ <:tab title="Pending" id="pending">
+ {live_render(
+ @conn,
+ Pinchflat.Pages.HistoryTableLive,
+ session: %{"media_state" => "pending"}
+ )}
+
+ <:tab title="Active Tasks" id="active-tasks">
+ {live_render(@conn, Pinchflat.Pages.JobTableLive)}
+
+
diff --git a/lib/pinchflat_web/controllers/pages/page_html/job_table_live.ex b/lib/pinchflat_web/controllers/pages/page_html/job_table_live.ex
index d65f724..7e6bdc0 100644
--- a/lib/pinchflat_web/controllers/pages/page_html/job_table_live.ex
+++ b/lib/pinchflat_web/controllers/pages/page_html/job_table_live.ex
@@ -19,18 +19,18 @@ defmodule Pinchflat.Pages.JobTableLive do
<.table rows={@tasks} table_class="text-white">
<:col :let={task} label="Task">
- <%= worker_to_task_name(task.job.worker) %>
+ {worker_to_task_name(task.job.worker)}
- <:col :let={task} label="Subject">
+ <:col :let={task} label="Subject" class="truncate max-w-xs">
<.subtle_link href={task_to_link(task)}>
- <%= StringUtils.truncate(task_to_record_name(task), 35) %>
+ {task_to_record_name(task)}
<:col :let={task} label="Attempt No.">
- <%= task.job.attempt %>
+ {task.job.attempt}
<:col :let={task} label="Started At">
- <%= format_datetime(task.job.attempted_at) %>
+ {format_datetime(task.job.attempted_at)}
diff --git a/lib/pinchflat_web/controllers/podcasts/podcast_controller.ex b/lib/pinchflat_web/controllers/podcasts/podcast_controller.ex
index d69e4f6..84401fe 100644
--- a/lib/pinchflat_web/controllers/podcasts/podcast_controller.ex
+++ b/lib/pinchflat_web/controllers/podcasts/podcast_controller.ex
@@ -6,8 +6,19 @@ defmodule PinchflatWeb.Podcasts.PodcastController do
alias Pinchflat.Sources.Source
alias Pinchflat.Media.MediaItem
alias Pinchflat.Podcasts.RssFeedBuilder
+ alias Pinchflat.Podcasts.OpmlFeedBuilder
alias Pinchflat.Podcasts.PodcastHelpers
+ def opml_feed(conn, _params) do
+ url_base = url(conn, ~p"/")
+ xml = OpmlFeedBuilder.build(url_base, PodcastHelpers.opml_sources())
+
+ conn
+ |> put_resp_content_type("application/opml+xml")
+ |> put_resp_header("content-disposition", "inline")
+ |> send_resp(200, xml)
+ end
+
def rss_feed(conn, %{"uuid" => uuid}) do
source = Repo.get_by!(Source, uuid: uuid)
url_base = url(conn, ~p"/")
diff --git a/lib/pinchflat_web/controllers/searches/search_html.ex b/lib/pinchflat_web/controllers/searches/search_html.ex
index 1e049be..eea90e3 100644
--- a/lib/pinchflat_web/controllers/searches/search_html.ex
+++ b/lib/pinchflat_web/controllers/searches/search_html.ex
@@ -14,7 +14,7 @@ defmodule PinchflatWeb.Searches.SearchHTML do
~H"""
<%= for fragment <- @split_string do %>
- <%= render_fragment(fragment) %>
+ {render_fragment(fragment)}
<% end %>
"""
end
diff --git a/lib/pinchflat_web/controllers/searches/search_html/show.html.heex b/lib/pinchflat_web/controllers/searches/search_html/show.html.heex
index 47a09fb..3e50066 100644
--- a/lib/pinchflat_web/controllers/searches/search_html/show.html.heex
+++ b/lib/pinchflat_web/controllers/searches/search_html/show.html.heex
@@ -1,6 +1,6 @@
-
- Results for "<%= StringUtils.truncate(@search_term, 50) %>"
+
+ Results for "{@search_term}"
@@ -9,9 +9,9 @@
<%= if match?([_|_], @search_results) do %>
<.table rows={@search_results} table_class="text-black dark:text-white">
- <:col :let={result} label="Title">
+ <:col :let={result} label="Title" class="truncate max-w-xs">
<.subtle_link href={~p"/sources/#{result.source_id}/media/#{result.id}"}>
- <%= StringUtils.truncate(result.title, 35) %>
+ {result.title}
<:col :let={result} label="Excerpt">
diff --git a/lib/pinchflat_web/controllers/settings/setting_html/setting_form.html.heex b/lib/pinchflat_web/controllers/settings/setting_html/setting_form.html.heex
index 2cf1701..c59031f 100644
--- a/lib/pinchflat_web/controllers/settings/setting_html/setting_form.html.heex
+++ b/lib/pinchflat_web/controllers/settings/setting_html/setting_form.html.heex
@@ -19,28 +19,50 @@
- <%= live_render(
+ {live_render(
@conn,
Pinchflat.Settings.AppriseServerLive,
session: %{"value" => f[:apprise_server].value}
- ) %>
+ )}
- Indexing Settings
+ Extractor Settings
<.input
field={f[:youtube_api_key]}
- placeholder="ABC123"
+ placeholder="ABC123,DEF456"
type="text"
- label="YouTube API Key"
+ label="YouTube API Key(s)"
help={youtube_api_help()}
html_help={true}
inputclass="font-mono text-sm mr-4"
/>
+
+ <.input
+ field={f[:extractor_sleep_interval_seconds]}
+ placeholder="0"
+ type="number"
+ label="Sleep Interval (seconds)"
+ help="Sleep interval in seconds between each extractor request. Must be a positive whole number. Set to 0 to disable"
+ />
+
+ <.input
+ field={f[:download_throughput_limit]}
+ placeholder="4.2M"
+ label="Download Throughput"
+ help="Sets the max bytes-per-second throughput when downloading media. Examples: '50K' or '4.2M'. Leave blank to disable"
+ />
+
+ <.input
+ field={f[:restrict_filenames]}
+ type="toggle"
+ label="Restrict Filenames"
+ help="Restrict filenames to only ASCII characters and avoid ampersands/spaces in filenames"
+ />
diff --git a/lib/pinchflat_web/controllers/sources/source_controller.ex b/lib/pinchflat_web/controllers/sources/source_controller.ex
index 2df192f..678618b 100644
--- a/lib/pinchflat_web/controllers/sources/source_controller.ex
+++ b/lib/pinchflat_web/controllers/sources/source_controller.ex
@@ -1,53 +1,48 @@
defmodule PinchflatWeb.Sources.SourceController do
use PinchflatWeb, :controller
- use Pinchflat.Media.MediaQuery
+ use Pinchflat.Sources.SourcesQuery
alias Pinchflat.Repo
alias Pinchflat.Tasks
alias Pinchflat.Sources
alias Pinchflat.Sources.Source
- alias Pinchflat.Media.MediaItem
alias Pinchflat.Profiles.MediaProfile
+ alias Pinchflat.Media.FileSyncingWorker
+ alias Pinchflat.Sources.SourceDeletionWorker
alias Pinchflat.Downloading.DownloadingHelpers
alias Pinchflat.SlowIndexing.SlowIndexingHelpers
alias Pinchflat.Metadata.SourceMetadataStorageWorker
def index(conn, _params) do
- source_query =
- from s in Source,
- as: :source,
- inner_join: mp in assoc(s, :media_profile),
- preload: [media_profile: mp],
- order_by: [asc: s.custom_name],
- select: map(s, ^Source.__schema__(:fields)),
- select_merge: %{
- downloaded_count:
- subquery(
- from m in MediaItem,
- where: m.source_id == parent_as(:source).id,
- where: ^MediaQuery.downloaded(),
- select: count(m.id)
- ),
- pending_count:
- subquery(
- from m in MediaItem,
- join: s in assoc(m, :source),
- where: m.source_id == parent_as(:source).id,
- where: ^MediaQuery.pending(),
- select: count(m.id)
- )
- }
-
- render(conn, :index, sources: Repo.all(source_query))
+ render(conn, :index)
end
- def new(conn, _params) do
- changeset = Sources.change_source(%Source{})
+ def new(conn, params) do
+ # This lets me preload the settings from another source for more efficient creation
+ cs_struct =
+ case to_string(params["template_id"]) do
+ "" -> %Source{}
+ template_id -> Repo.get(Source, template_id) || %Source{}
+ end
render(conn, :new,
- changeset: changeset,
media_profiles: media_profiles(),
- layout: get_onboarding_layout()
+ layout: get_onboarding_layout(),
+ # Most of these don't actually _need_ to be nullified at this point,
+ # but if I don't do it now I know it'll bite me
+ changeset:
+ Sources.change_source(%Source{
+ cs_struct
+ | id: nil,
+ uuid: nil,
+ custom_name: nil,
+ description: nil,
+ collection_name: nil,
+ collection_id: nil,
+ collection_type: nil,
+ original_url: nil,
+ marked_for_deletion_at: nil
+ })
)
end
@@ -107,19 +102,15 @@ defmodule PinchflatWeb.Sources.SourceController do
end
def delete(conn, %{"id" => id} = params) do
- delete_files = Map.get(params, "delete_files", false)
+ # This awkward comparison converts the string to a boolean
+ delete_files = Map.get(params, "delete_files", "") == "true"
source = Sources.get_source!(id)
- {:ok, _source} = Sources.delete_source(source, delete_files: delete_files)
- flash_message =
- if delete_files do
- "Source and files deleted successfully."
- else
- "Source deleted successfully. Files were not deleted."
- end
+ {:ok, _} = Sources.update_source(source, %{marked_for_deletion_at: DateTime.utc_now()})
+ SourceDeletionWorker.kickoff(source, %{delete_files: delete_files})
conn
- |> put_flash(:info, flash_message)
+ |> put_flash(:info, "Source deletion started. This may take a while to complete.")
|> redirect(to: ~p"/sources")
end
@@ -159,6 +150,15 @@ defmodule PinchflatWeb.Sources.SourceController do
)
end
+ def sync_files_on_disk(conn, %{"source_id" => id}) do
+ wrap_forced_action(
+ conn,
+ id,
+ "File sync enqueued.",
+ &FileSyncingWorker.kickoff_with_task/1
+ )
+ end
+
defp wrap_forced_action(conn, source_id, message, fun) do
source = Sources.get_source!(source_id)
fun.(source)
diff --git a/lib/pinchflat_web/controllers/sources/source_html.ex b/lib/pinchflat_web/controllers/sources/source_html.ex
index bf61720..00ab498 100644
--- a/lib/pinchflat_web/controllers/sources/source_html.ex
+++ b/lib/pinchflat_web/controllers/sources/source_html.ex
@@ -9,6 +9,7 @@ defmodule PinchflatWeb.Sources.SourceHTML do
attr :changeset, Ecto.Changeset, required: true
attr :action, :string, required: true
attr :media_profiles, :list, required: true
+ attr :method, :string, required: true
def source_form(assigns)
@@ -26,10 +27,36 @@ defmodule PinchflatWeb.Sources.SourceHTML do
]
end
+ def friendly_cookie_behaviours do
+ [
+ {"Disabled", :disabled},
+ {"When Needed", :when_needed},
+ {"All Operations", :all_operations}
+ ]
+ end
+
+ def cutoff_date_presets do
+ [
+ {"7 days", compute_date_offset(7)},
+ {"14 days", compute_date_offset(14)},
+ {"30 days", compute_date_offset(30)},
+ {"60 days", compute_date_offset(60)},
+ {"90 days", compute_date_offset(90)},
+ {"180 days", compute_date_offset(180)},
+ {"365 days", compute_date_offset(365)}
+ ]
+ end
+
def rss_feed_url(conn, source) do
+ # NOTE: The reason for this concatenation is to avoid what appears to be a bug in Phoenix
+ # See: https://github.com/phoenixframework/phoenix/issues/6033
url(conn, ~p"/sources/#{source.uuid}/feed") <> ".xml"
end
+ def opml_feed_url(conn) do
+ url(conn, ~p"/sources/opml.xml?#{[route_token: Settings.get!(:route_token)]}")
+ end
+
def output_path_template_override_placeholders(media_profiles) do
media_profiles
|> Enum.map(&{&1.id, &1.output_path_template})
@@ -54,4 +81,13 @@ defmodule PinchflatWeb.Sources.SourceHTML do
Must end with .{{ ext }}. Same rules as Media Profile output path templates. #{help_button} to load your media profile's output template
"""
end
+
+ defp compute_date_offset(days) do
+ timezone = Application.get_env(:pinchflat, :timezone)
+
+ timezone
+ |> Timex.now()
+ |> Timex.shift(days: -days)
+ |> Timex.format!("{YYYY}-{0M}-{0D}")
+ end
end
diff --git a/lib/pinchflat_web/controllers/sources/source_html/actions_dropdown.html.heex b/lib/pinchflat_web/controllers/sources/source_html/actions_dropdown.html.heex
index ab5b7c1..ac00877 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/actions_dropdown.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/actions_dropdown.html.heex
@@ -1,23 +1,21 @@
<.button_dropdown text="Actions" class="justify-center w-full sm:w-50">
<:option>
-
copied = true,
- () => copied = false
- )
- "}
- >
+ <.link href={rss_feed_url(@conn, @source)} x-data="{ copied: false }" x-on:click={~s"
+ $event.preventDefault();
+ copyWithCallbacks(
+ '#{rss_feed_url(@conn, @source)}',
+ () => copied = true,
+ () => copied = false
+ )
+ "}>
Copy RSS Feed
<.icon name="hero-check" class="ml-2 h-4 w-4" />
-
+
<:option>
copied = true,
() => copied = false
)
@@ -26,6 +24,11 @@
<.icon name="hero-check" class="ml-2 h-4 w-4" />
+ <:option>
+ <.link href={~p"/sources/new?template_id=#{@source}"} method="get">
+ Use as Template
+
+
<:option>
@@ -33,7 +36,7 @@
<.link
href={~p"/sources/#{@source}/force_download_pending"}
method="post"
- data-confirm="Are you sure you want to force a download of all *pending* media items? This isn't normally needed."
+ data-confirm="Are you sure you want to force a download of all pending media items? This isn't normally needed."
>
Download Pending
@@ -42,7 +45,7 @@
<.link
href={~p"/sources/#{@source}/force_redownload"}
method="post"
- data-confirm="Are you sure you want to re-download all currently downloaded media items? This isn't normally needed and won't change anything if the files already exist."
+ data-confirm="Are you sure you want to re-download all currently downloaded media items? This doesn't upgrade your media, but will download any missing files if your settings have changed. This isn't normally needed."
>
Redownload Existing
@@ -51,7 +54,7 @@
<.link
href={~p"/sources/#{@source}/force_index"}
method="post"
- data-confirm="Are you sure you want to force an index of this source? This isn't normally needed."
+ data-confirm="Are you sure you want to index all content from this source? This isn't normally needed."
>
Force Index
@@ -65,6 +68,15 @@
Refresh Metadata
+ <:option>
+ <.link
+ href={~p"/sources/#{@source}/sync_files_on_disk"}
+ method="post"
+ data-confirm="Are you sure you want to sync files? This will update media items if their files have been deleted. File additions or moves are not detected. This isn't normally needed."
+ >
+ Sync Files on Disk
+
+
<:option>
diff --git a/lib/pinchflat_web/controllers/sources/source_html/edit.html.heex b/lib/pinchflat_web/controllers/sources/source_html/edit.html.heex
index 680d2e3..13b4fae 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/edit.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/edit.html.heex
@@ -2,15 +2,20 @@
<.link href={~p"/sources"}>
<.icon name="hero-arrow-left" class="w-10 h-10 hover:dark:text-white" />
-
- Editing "<%= @source.custom_name %>"
+
+ Editing "{@source.custom_name}"
- <.source_form changeset={@changeset} media_profiles={@media_profiles} action={~p"/sources/#{@source}"} />
+ <.source_form
+ changeset={@changeset}
+ media_profiles={@media_profiles}
+ action={~p"/sources/#{@source}"}
+ method="patch"
+ />
diff --git a/lib/pinchflat_web/controllers/sources/source_html/index.html.heex b/lib/pinchflat_web/controllers/sources/source_html/index.html.heex
index 5913d30..1b23bc9 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/index.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/index.html.heex
@@ -1,6 +1,17 @@
Sources
-
+
+ <.link href={opml_feed_url(@conn)} x-data="{ copied: false }" x-on:click={~s"
+ $event.preventDefault();
+ copyWithCallbacks(
+ '#{opml_feed_url(@conn)}',
+ () => copied = true,
+ () => copied = false
+ )
+ "}>
+ Copy OPML Feed
+ <.icon name="hero-check" class="ml-2 h-4 w-4" />
+
<.link href={~p"/sources/new"}>
<.button color="bg-primary" rounding="rounded-lg">
+ New Source
@@ -11,33 +22,12 @@
-
- <.table rows={@sources} table_class="text-black dark:text-white">
- <:col :let={source} label="Name">
- <.subtle_link href={~p"/sources/#{source.id}"}>
- <%= StringUtils.truncate(source.custom_name || source.collection_name, 35) %>
-
-
- <:col :let={source} label="Type"><%= source.collection_type %>
- <:col :let={source} label="Pending"><.localized_number number={source.pending_count} />
- <:col :let={source} label="Downloaded"><.localized_number number={source.downloaded_count} />
- <:col :let={source} label="Retention">
- <%= if source.retention_period_days && source.retention_period_days > 0 do %>
- <.localized_number number={source.retention_period_days} />
- <.pluralize count={source.retention_period_days} word="day" />
- <% else %>
- ∞
- <% end %>
-
- <:col :let={source} label="Media Profile">
- <.subtle_link href={~p"/media_profiles/#{source.media_profile_id}"}>
- <%= source.media_profile.name %>
-
-
- <:col :let={source} label="" class="flex place-content-evenly">
- <.icon_link href={~p"/sources/#{source.id}/edit"} icon="hero-pencil-square" class="mx-1" />
-
-
-
+ {live_render(@conn, PinchflatWeb.Sources.SourceLive.IndexTableLive,
+ session: %{
+ "initial_sort_key" => :custom_name,
+ "initial_sort_direction" => :asc,
+ "results_per_page" => 10
+ }
+ )}
diff --git a/lib/pinchflat_web/controllers/sources/source_html/index_table_live.ex b/lib/pinchflat_web/controllers/sources/source_html/index_table_live.ex
new file mode 100644
index 0000000..453073c
--- /dev/null
+++ b/lib/pinchflat_web/controllers/sources/source_html/index_table_live.ex
@@ -0,0 +1,103 @@
+defmodule PinchflatWeb.Sources.IndexTableLive do
+ use PinchflatWeb, :live_view
+ use Pinchflat.Media.MediaQuery
+ use Pinchflat.Sources.SourcesQuery
+
+ alias Pinchflat.Repo
+ alias Pinchflat.Sources
+ alias Pinchflat.Sources.Source
+ alias Pinchflat.Media.MediaItem
+
+ def render(assigns) do
+ ~H"""
+ <.table rows={@sources} table_class="text-white">
+ <:col :let={source} label="Name" class="truncate max-w-xs">
+ <.subtle_link href={~p"/sources/#{source.id}"}>
+ {source.custom_name || source.collection_name}
+
+
+ <:col :let={source} label="Pending">
+ <.subtle_link href={~p"/sources/#{source.id}/#tab-pending"}>
+ <.localized_number number={source.pending_count} />
+
+
+ <:col :let={source} label="Downloaded">
+ <.subtle_link href={~p"/sources/#{source.id}/#tab-downloaded"}>
+ <.localized_number number={source.downloaded_count} />
+
+
+ <:col :let={source} label="Retention">
+ <%= if source.retention_period_days && source.retention_period_days > 0 do %>
+ <.localized_number number={source.retention_period_days} />
+ <.pluralize count={source.retention_period_days} word="day" />
+ <% else %>
+ ∞
+ <% end %>
+
+ <:col :let={source} label="Media Profile">
+ <.subtle_link href={~p"/media_profiles/#{source.media_profile_id}"}>
+ {source.media_profile.name}
+
+
+ <:col :let={source} label="Enabled?">
+ <.input
+ name={"source[#{source.id}][enabled]"}
+ value={source.enabled}
+ id={"source_#{source.id}_enabled"}
+ phx-hook="formless-input"
+ data-subscribe="change"
+ data-event-name="toggle_enabled"
+ data-identifier={source.id}
+ type="toggle"
+ />
+
+ <:col :let={source} label="" class="flex place-content-evenly">
+ <.icon_link href={~p"/sources/#{source.id}/edit"} icon="hero-pencil-square" class="mx-1" />
+
+
+ """
+ end
+
+ def mount(_params, _session, socket) do
+ {:ok, assign(socket, %{sources: get_sources()})}
+ end
+
+ def handle_event("formless-input", %{"event" => "toggle_enabled"} = params, socket) do
+ source = Sources.get_source!(params["id"])
+ should_enable = params["value"] == "true"
+
+ {:ok, _} = Sources.update_source(source, %{enabled: should_enable})
+
+ {:noreply, assign(socket, %{sources: get_sources()})}
+ end
+
+ defp get_sources do
+ query =
+ from s in Source,
+ as: :source,
+ inner_join: mp in assoc(s, :media_profile),
+ where: is_nil(s.marked_for_deletion_at) and is_nil(mp.marked_for_deletion_at),
+ preload: [media_profile: mp],
+ order_by: [asc: s.custom_name],
+ select: map(s, ^Source.__schema__(:fields)),
+ select_merge: %{
+ downloaded_count:
+ subquery(
+ from m in MediaItem,
+ where: m.source_id == parent_as(:source).id,
+ where: ^MediaQuery.downloaded(),
+ select: count(m.id)
+ ),
+ pending_count:
+ subquery(
+ from m in MediaItem,
+ join: s in assoc(m, :source),
+ where: m.source_id == parent_as(:source).id,
+ where: ^MediaQuery.pending(),
+ select: count(m.id)
+ )
+ }
+
+ Repo.all(query)
+ end
+end
diff --git a/lib/pinchflat_web/controllers/sources/source_html/media_item_table_live.ex b/lib/pinchflat_web/controllers/sources/source_html/media_item_table_live.ex
index fd14a3a..de4c7ab 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/media_item_table_live.ex
+++ b/lib/pinchflat_web/controllers/sources/source_html/media_item_table_live.ex
@@ -1,4 +1,4 @@
-defmodule Pinchflat.Sources.MediaItemTableLive do
+defmodule PinchflatWeb.Sources.MediaItemTableLive do
use PinchflatWeb, :live_view
use Pinchflat.Media.MediaQuery
@@ -23,7 +23,7 @@ defmodule Pinchflat.Sources.MediaItemTableLive do
<.icon_button icon_name="hero-arrow-path" class="h-10 w-10" phx-click="reload_page" tooltip="Refresh" />
-
+
Showing <.localized_number number={length(@records)} /> of <.localized_number number={@filtered_record_count} />
@@ -46,16 +46,28 @@ defmodule Pinchflat.Sources.MediaItemTableLive do
<.table rows={@records} table_class="text-white">
- <:col :let={media_item} label="Title">
- <.subtle_link href={~p"/sources/#{@source.id}/media/#{media_item.id}"}>
- <%= StringUtils.truncate(media_item.title, 50) %>
-
+ <:col :let={media_item} label="Title" class="max-w-xs">
+
+ <.tooltip
+ :if={media_item.last_error}
+ tooltip={media_item.last_error}
+ position="bottom-right"
+ tooltip_class="w-64"
+ >
+ <.icon name="hero-exclamation-circle-solid" class="text-red-500" />
+
+
+ <.subtle_link href={~p"/sources/#{@source.id}/media/#{media_item.id}"}>
+ {media_item.title}
+
+
+
<:col :let={media_item} :if={@media_state == "other"} label="Manually Ignored?">
<.icon name={if media_item.prevent_download, do: "hero-check", else: "hero-x-mark"} />
<:col :let={media_item} label="Upload Date">
- <%= DateTime.to_date(media_item.uploaded_at) %>
+ {DateTime.to_date(media_item.uploaded_at)}
<:col :let={media_item} label="" class="flex justify-end">
<.icon_link href={~p"/sources/#{@source.id}/media/#{media_item.id}/edit"} icon="hero-pencil-square" class="mr-4" />
@@ -205,6 +217,6 @@ defmodule Pinchflat.Sources.MediaItemTableLive do
# Selecting only what we need GREATLY speeds up queries on large tables
defp select_fields do
- [:id, :title, :uploaded_at, :prevent_download]
+ [:id, :title, :uploaded_at, :prevent_download, :last_error]
end
end
diff --git a/lib/pinchflat_web/controllers/sources/source_html/new.html.heex b/lib/pinchflat_web/controllers/sources/source_html/new.html.heex
index f144d9e..e4969f9 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/new.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/new.html.heex
@@ -8,7 +8,7 @@
- <.source_form changeset={@changeset} media_profiles={@media_profiles} action={~p"/sources"} />
+ <.source_form changeset={@changeset} media_profiles={@media_profiles} action={~p"/sources"} method="post" />
diff --git a/lib/pinchflat_web/controllers/sources/source_html/show.html.heex b/lib/pinchflat_web/controllers/sources/source_html/show.html.heex
index 26fdc96..aefe7fe 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/show.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/show.html.heex
@@ -1,10 +1,10 @@
-
+
<.link href={~p"/sources"}>
<.icon name="hero-arrow-left" class="w-10 h-10 hover:dark:text-white" />
-
- <%= @source.custom_name %>
+
+ {@source.custom_name}
@@ -24,50 +24,52 @@
<:tab title="Source" id="source">
-
-
Raw Attributes
+
- Media Profile:
- <.subtle_link href={~p"/media_profiles/#{@source.media_profile_id}"}>
- <%= @source.media_profile.name %>
-
-
+
Raw Attributes
+
+ Media Profile:
+ <.subtle_link href={~p"/media_profiles/#{@source.media_profile_id}"}>
+ {@source.media_profile.name}
+
+
- <.list_items_from_map map={Map.from_struct(@source)} />
+ <.list_items_from_map map={Map.from_struct(@source)} />
+
<:tab title="Pending" id="pending">
- <%= live_render(
+ {live_render(
@conn,
- Pinchflat.Sources.MediaItemTableLive,
+ PinchflatWeb.Sources.MediaItemTableLive,
session: %{"source_id" => @source.id, "media_state" => "pending"}
- ) %>
+ )}
<:tab title="Downloaded" id="downloaded">
- <%= live_render(
+ {live_render(
@conn,
- Pinchflat.Sources.MediaItemTableLive,
+ PinchflatWeb.Sources.MediaItemTableLive,
session: %{"source_id" => @source.id, "media_state" => "downloaded"}
- ) %>
+ )}
<:tab title="Other" id="other">
- <%= live_render(
+ {live_render(
@conn,
- Pinchflat.Sources.MediaItemTableLive,
+ PinchflatWeb.Sources.MediaItemTableLive,
session: %{"source_id" => @source.id, "media_state" => "other"}
- ) %>
+ )}
<:tab title="Tasks" id="tasks">
<%= if match?([_|_], @pending_tasks) do %>
<.table rows={@pending_tasks} table_class="text-black dark:text-white">
<:col :let={task} label="Worker">
- <%= task.job.worker %>
+ {task.job.worker}
<:col :let={task} label="State">
- <%= task.job.state %>
+ {task.job.state}
<:col :let={task} label="Scheduled At">
- <%= Calendar.strftime(task.job.scheduled_at, "%y-%m-%d %I:%M:%S %p %Z") %>
+ <.datetime_in_zone datetime={task.job.scheduled_at} />
<% else %>
diff --git a/lib/pinchflat_web/controllers/sources/source_html/source_form.html.heex b/lib/pinchflat_web/controllers/sources/source_html/source_form.html.heex
index 6705f99..014d597 100644
--- a/lib/pinchflat_web/controllers/sources/source_html/source_form.html.heex
+++ b/lib/pinchflat_web/controllers/sources/source_html/source_form.html.heex
@@ -2,6 +2,7 @@
:let={f}
for={@changeset}
action={@action}
+ method={@method}
x-data="{ advancedMode: !!JSON.parse(localStorage.getItem('advancedMode')) }"
x-init="$watch('advancedMode', value => localStorage.setItem('advancedMode', JSON.stringify(value)))"
>
@@ -10,8 +11,8 @@
-
-
+
+
General Options
@@ -19,6 +20,14 @@
+ <.input
+ field={f[:original_url]}
+ type="text"
+ label="Source URL"
+ help="URL of a channel or playlist (required)"
+ x-init="$el.focus()"
+ />
+
<.input
field={f[:custom_name]}
type="text"
@@ -26,8 +35,6 @@
help="Does not impact indexing or downloading. Will be inferred from the source if left blank"
/>
- <.input field={f[:original_url]} type="text" label="Source URL" help="URL of a channel or playlist (required)" />
-
<.input
field={f[:media_profile_id]}
options={Enum.map(@media_profiles, &{&1.name, &1.id})}
@@ -80,16 +87,57 @@
/>
<.input
- field={f[:download_cutoff_date]}
- type="text"
- label="Download Cutoff Date"
- placeholder="YYYY-MM-DD"
- maxlength="10"
- pattern="((?:19|20)[0-9][0-9])-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])"
- title="YYYY-MM-DD"
- help="Only download media uploaded after this date. Leave blank to download all media. Must be in YYYY-MM-DD format"
+ field={f[:cookie_behaviour]}
+ options={friendly_cookie_behaviours()}
+ type="select"
+ label="Cookie Behaviour"
+ help="Uses your YouTube cookies for this source (if configured). 'When Needed' tries to minimize cookie usage except for certain indexing and downloading tasks. See docs"
/>
+
+ <.input
+ field={f[:min_duration_seconds]}
+ type="number"
+ label="Minimum Duration (seconds)"
+ min="0"
+ help="Minimum duration of the media to be downloaded. Can be blank"
+ />
+
+ <.input
+ field={f[:max_duration_seconds]}
+ type="number"
+ label="Maximum Duration (seconds)"
+ min="0"
+ help="Maximum duration of the media to be downloaded. Can be blank"
+ />
+
+
+
+ <.input
+ field={f[:download_cutoff_date]}
+ type="text"
+ label="Download Cutoff Date"
+ placeholder="YYYY-MM-DD"
+ maxlength="10"
+ pattern="((?:19|20)[0-9][0-9])-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])"
+ title="YYYY-MM-DD"
+ help="Only download media uploaded after this date. Leave blank to download all media. Must be in YYYY-MM-DD format. Old media may be deleted or downloaded if you change this date"
+ x-model="cutoffDate"
+ >
+ <:input_append>
+ <.input
+ prompt="Select preset"
+ name="download_cutoff_date_preset"
+ value=""
+ options={cutoff_date_presets()}
+ type="select"
+ inputclass="w-30 lg:w-60 ml-2 md:ml-4"
+ x-on:change={"cutoffDate = $event.target.value || '#{f[:download_cutoff_date].value}'"}
+ />
+
+
+
+
<.input
field={f[:retention_period_days]}
type="number"
diff --git a/lib/pinchflat_web/controllers/sources/source_live/index_table_live.ex b/lib/pinchflat_web/controllers/sources/source_live/index_table_live.ex
new file mode 100644
index 0000000..a9f63c8
--- /dev/null
+++ b/lib/pinchflat_web/controllers/sources/source_live/index_table_live.ex
@@ -0,0 +1,108 @@
+defmodule PinchflatWeb.Sources.SourceLive.IndexTableLive do
+ use PinchflatWeb, :live_view
+ use Pinchflat.Media.MediaQuery
+ use Pinchflat.Sources.SourcesQuery
+
+ import PinchflatWeb.Helpers.SortingHelpers
+ import PinchflatWeb.Helpers.PaginationHelpers
+
+ alias Pinchflat.Repo
+ alias Pinchflat.Sources.Source
+ alias Pinchflat.Media.MediaItem
+
+ def mount(_params, session, socket) do
+ limit = session["results_per_page"]
+
+ initial_params =
+ Map.merge(
+ %{
+ sort_key: session["initial_sort_key"],
+ sort_direction: session["initial_sort_direction"]
+ },
+ get_pagination_attributes(sources_query(), 1, limit)
+ )
+
+ socket
+ |> assign(initial_params)
+ |> set_sources()
+ |> then(&{:ok, &1})
+ end
+
+ def handle_event("page_change", %{"direction" => direction}, %{assigns: assigns} = socket) do
+ new_page = update_page_number(assigns.page, direction, assigns.total_pages)
+
+ socket
+ |> assign(get_pagination_attributes(sources_query(), new_page, assigns.limit))
+ |> set_sources()
+ |> then(&{:noreply, &1})
+ end
+
+ def handle_event("sort_update", %{"sort_key" => sort_key}, %{assigns: assigns} = socket) do
+ new_sort_key = String.to_existing_atom(sort_key)
+
+ new_params = %{
+ sort_key: new_sort_key,
+ sort_direction: get_sort_direction(assigns.sort_key, new_sort_key, assigns.sort_direction)
+ }
+
+ socket
+ |> assign(new_params)
+ |> set_sources()
+ |> then(&{:noreply, &1})
+ end
+
+ defp sort_attr(:pending_count), do: dynamic([s, mp, dl, pe], pe.pending_count)
+ defp sort_attr(:downloaded_count), do: dynamic([s, mp, dl], dl.downloaded_count)
+ defp sort_attr(:media_size_bytes), do: dynamic([s, mp, dl], dl.media_size_bytes)
+ defp sort_attr(:media_profile_name), do: dynamic([s, mp], fragment("? COLLATE NOCASE", mp.name))
+ defp sort_attr(:custom_name), do: dynamic([s], fragment("? COLLATE NOCASE", s.custom_name))
+ defp sort_attr(:enabled), do: dynamic([s], s.enabled)
+
+ defp set_sources(%{assigns: assigns} = socket) do
+ sources =
+ sources_query()
+ |> order_by(^[{assigns.sort_direction, sort_attr(assigns.sort_key)}, asc: :id])
+ |> limit(^assigns.limit)
+ |> offset(^assigns.offset)
+ |> Repo.all()
+
+ assign(socket, %{sources: sources})
+ end
+
+ defp sources_query do
+ downloaded_subquery =
+ from(
+ m in MediaItem,
+ select: %{downloaded_count: count(m.id), source_id: m.source_id, media_size_bytes: sum(m.media_size_bytes)},
+ where: ^MediaQuery.downloaded(),
+ group_by: m.source_id
+ )
+
+ pending_subquery =
+ from(
+ m in MediaItem,
+ inner_join: s in assoc(m, :source),
+ inner_join: mp in assoc(s, :media_profile),
+ select: %{pending_count: count(m.id), source_id: m.source_id},
+ where: ^MediaQuery.pending(),
+ group_by: m.source_id
+ )
+
+ from s in Source,
+ as: :source,
+ inner_join: mp in assoc(s, :media_profile),
+ left_join: d in subquery(downloaded_subquery),
+ on: d.source_id == s.id,
+ left_join: p in subquery(pending_subquery),
+ on: p.source_id == s.id,
+
+ where: is_nil(s.marked_for_deletion_at) and is_nil(mp.marked_for_deletion_at),
+ preload: [media_profile: mp],
+ select: map(s, ^Source.__schema__(:fields)),
+ select_merge: %{
+ downloaded_count: coalesce(d.downloaded_count, 0),
+ pending_count: coalesce(p.pending_count, 0),
+ media_size_bytes: coalesce(d.media_size_bytes, 0)
+ }
+ end
+end
diff --git a/lib/pinchflat_web/controllers/sources/source_live/index_table_live.html.heex b/lib/pinchflat_web/controllers/sources/source_live/index_table_live.html.heex
new file mode 100644
index 0000000..e559e27
--- /dev/null
+++ b/lib/pinchflat_web/controllers/sources/source_live/index_table_live.html.heex
@@ -0,0 +1,41 @@
+
+ <.table rows={@sources} table_class="text-white" sort_key={@sort_key} sort_direction={@sort_direction}>
+ <:col :let={source} label="Name" sort_key="custom_name" class="truncate max-w-xs">
+ <.subtle_link href={~p"/sources/#{source.id}"}>
+ {source.custom_name}
+
+
+ <:col :let={source} label="Pending" sort_key="pending_count">
+ <.subtle_link href={~p"/sources/#{source.id}/#tab-pending"}>
+ <.localized_number number={source.pending_count} />
+
+
+ <:col :let={source} label="Downloaded" sort_key="downloaded_count">
+ <.subtle_link href={~p"/sources/#{source.id}/#tab-downloaded"}>
+ <.localized_number number={source.downloaded_count} />
+
+
+ <:col :let={source} label="Size" sort_key="media_size_bytes">
+ <.readable_filesize byte_size={source.media_size_bytes} />
+
+ <:col :let={source} label="Media Profile" sort_key="media_profile_name" class="truncate max-w-xs">
+ <.subtle_link href={~p"/media_profiles/#{source.media_profile_id}"}>
+ {source.media_profile.name}
+
+
+ <:col :let={source} label="Enabled?" sort_key="enabled">
+ <.live_component
+ module={PinchflatWeb.Sources.SourceLive.SourceEnableToggle}
+ source={source}
+ id={"source_#{source.id}_enabled"}
+ />
+
+ <:col :let={source} label="" class="flex place-content-evenly">
+ <.icon_link href={~p"/sources/#{source.id}/edit"} icon="hero-pencil-square" class="mx-1" />
+
+
+
+
+ <.live_pagination_controls page_number={@page} total_pages={@total_pages} />
+
+
diff --git a/lib/pinchflat_web/controllers/sources/source_live/source_enable_toggle.ex b/lib/pinchflat_web/controllers/sources/source_live/source_enable_toggle.ex
new file mode 100644
index 0000000..57bc3b1
--- /dev/null
+++ b/lib/pinchflat_web/controllers/sources/source_live/source_enable_toggle.ex
@@ -0,0 +1,35 @@
+defmodule PinchflatWeb.Sources.SourceLive.SourceEnableToggle do
+ use PinchflatWeb, :live_component
+
+ alias Pinchflat.Sources
+ alias Pinchflat.Sources.Source
+
+ def render(assigns) do
+ ~H"""
+
+ <.form :let={f} for={@form} phx-change="update" phx-target={@myself} class="enabled_toggle_form">
+ <.input id={"source_#{@source_id}_enabled_input"} field={f[:enabled]} type="toggle" />
+
+
+ """
+ end
+
+ def update(assigns, socket) do
+ initial_data = %{
+ source_id: assigns.source.id,
+ form: Sources.change_source(%Source{}, assigns.source)
+ }
+
+ socket
+ |> assign(initial_data)
+ |> then(&{:ok, &1})
+ end
+
+ def handle_event("update", %{"source" => source_params}, %{assigns: assigns} = socket) do
+ assigns.source_id
+ |> Sources.get_source!()
+ |> Sources.update_source(source_params)
+
+ {:noreply, socket}
+ end
+end
diff --git a/lib/pinchflat_web/endpoint.ex b/lib/pinchflat_web/endpoint.ex
index bcf53c0..bf094ed 100644
--- a/lib/pinchflat_web/endpoint.ex
+++ b/lib/pinchflat_web/endpoint.ex
@@ -20,7 +20,7 @@ defmodule PinchflatWeb.Endpoint do
plug Plug.Static,
at: "/",
from: :pinchflat,
- gzip: Mix.env() == :prod,
+ gzip: Application.compile_env(:pinchflat, :env) == :prod,
only: PinchflatWeb.static_paths()
# Code reloading can be explicitly enabled under the
@@ -32,12 +32,17 @@ defmodule PinchflatWeb.Endpoint do
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :pinchflat
end
+ plug PromEx.Plug, prom_ex_module: Pinchflat.PromEx
+
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
- plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
+
+ plug Plug.Telemetry,
+ event_prefix: [:phoenix, :endpoint],
+ log: {__MODULE__, :log_level, []}
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
@@ -53,6 +58,10 @@ defmodule PinchflatWeb.Endpoint do
plug PinchflatWeb.Router
+ # Disables logging in Plug.Telemetry for healthcheck requests
+ def log_level(%Plug.Conn{path_info: ["healthcheck"]}), do: false
+ def log_level(_), do: :info
+
# URLs need to be generated using the host of the current page being accessed
# for things like Podcast RSS feeds to contain links to the right location.
#
@@ -73,6 +82,13 @@ defmodule PinchflatWeb.Endpoint do
Phoenix.Controller.put_router_url(conn, new_base_url)
end
+ # Some podcast clients require file extensions, and others still will _add_
+ # file extensions to XML files if they don't have them. This plug removes
+ # the extension from the path so that the correct route is matched, regardless
+ # of the provided extension.
+ #
+ # This has the downside of in-app generated verified routes not working with
+ # extensions so this behaviour may change in the future.
defp strip_trailing_extension(%{path_info: []} = conn, _opts), do: conn
defp strip_trailing_extension(conn, _opts) do
diff --git a/lib/pinchflat_web/gettext.ex b/lib/pinchflat_web/gettext.ex
index 9c042a7..6aa0242 100644
--- a/lib/pinchflat_web/gettext.ex
+++ b/lib/pinchflat_web/gettext.ex
@@ -5,7 +5,7 @@ defmodule PinchflatWeb.Gettext do
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
- import PinchflatWeb.Gettext
+ use Gettext, backend: PinchflatWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
@@ -20,5 +20,5 @@ defmodule PinchflatWeb.Gettext do
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
- use Gettext, otp_app: :pinchflat
+ use Gettext.Backend, otp_app: :pinchflat
end
diff --git a/lib/pinchflat_web/helpers/pagination_helpers.ex b/lib/pinchflat_web/helpers/pagination_helpers.ex
new file mode 100644
index 0000000..50c3928
--- /dev/null
+++ b/lib/pinchflat_web/helpers/pagination_helpers.ex
@@ -0,0 +1,45 @@
+defmodule PinchflatWeb.Helpers.PaginationHelpers do
+ @moduledoc """
+ Methods for working with pagination, usually in the context of LiveViews or LiveComponents.
+
+ These methods are fairly simple, but they're commonly repeated across different Live entities
+ """
+
+ alias Pinchflat.Repo
+ alias Pinchflat.Utils.NumberUtils
+
+ @doc """
+ Given a query, a page number, and a number of records per page, returns a map of pagination attributes.
+
+ Returns map()
+ """
+ def get_pagination_attributes(query, page, records_per_page) do
+ total_record_count = Repo.aggregate(query, :count, :id)
+ total_pages = max(ceil(total_record_count / records_per_page), 1)
+ clamped_page = NumberUtils.clamp(page, 1, total_pages)
+
+ %{
+ page: clamped_page,
+ total_pages: total_pages,
+ total_record_count: total_record_count,
+ limit: records_per_page,
+ offset: (clamped_page - 1) * records_per_page
+ }
+ end
+
+ @doc """
+ Given a current page number, a direction to move in, and the total number of pages, returns the updated page number.
+ The updated page number is clamped to the range [1, total_pages].
+
+ Returns integer()
+ """
+ def update_page_number(current_page, direction, total_pages) do
+ updated_page =
+ case to_string(direction) do
+ "inc" -> current_page + 1
+ "dec" -> current_page - 1
+ end
+
+ NumberUtils.clamp(updated_page, 1, total_pages)
+ end
+end
diff --git a/lib/pinchflat_web/helpers/sorting_helpers.ex b/lib/pinchflat_web/helpers/sorting_helpers.ex
new file mode 100644
index 0000000..fb65e62
--- /dev/null
+++ b/lib/pinchflat_web/helpers/sorting_helpers.ex
@@ -0,0 +1,20 @@
+defmodule PinchflatWeb.Helpers.SortingHelpers do
+ @moduledoc """
+ Methods for working with sorting, usually in the context of LiveViews or LiveComponents.
+
+ These methods are fairly simple, but they're commonly repeated across different Live entities
+ """
+
+ @doc """
+ Given the old sort attribute, the new sort attribute, and the old sort direction, returns the new sort direction.
+
+ Returns :asc | :desc
+ """
+ def get_sort_direction(old_sort_attr, new_sort_attr, old_sort_direction) do
+ case {new_sort_attr, old_sort_direction} do
+ {^old_sort_attr, :desc} -> :asc
+ {^old_sort_attr, _} -> :desc
+ _ -> :asc
+ end
+ end
+end
diff --git a/lib/pinchflat_web/plugs.ex b/lib/pinchflat_web/plugs.ex
new file mode 100644
index 0000000..f905d6f
--- /dev/null
+++ b/lib/pinchflat_web/plugs.ex
@@ -0,0 +1,66 @@
+defmodule PinchflatWeb.Plugs do
+ @moduledoc """
+ Custom plugs for PinchflatWeb.
+ """
+
+ use PinchflatWeb, :router
+ alias Pinchflat.Settings
+
+ @doc """
+ If the `expose_feed_endpoints` setting is true, this plug does nothing. Otherwise, it calls `basic_auth/2`.
+ """
+ def maybe_basic_auth(conn, opts) do
+ if Application.get_env(:pinchflat, :expose_feed_endpoints) do
+ conn
+ else
+ basic_auth(conn, opts)
+ end
+ end
+
+ @doc """
+ If the `basic_auth_username` and `basic_auth_password` settings are set, this plug calls `Plug.BasicAuth.basic_auth/3`.
+ """
+ def basic_auth(conn, _opts) do
+ username = Application.get_env(:pinchflat, :basic_auth_username)
+ password = Application.get_env(:pinchflat, :basic_auth_password)
+
+ if credential_set?(username) && credential_set?(password) do
+ Plug.BasicAuth.basic_auth(conn, username: username, password: password, realm: "Pinchflat")
+ else
+ conn
+ end
+ end
+
+ @doc """
+ Removes the `x-frame-options` header from the response to allow the page to be embedded in an iframe.
+ """
+ def allow_iframe_embed(conn, _opts) do
+ delete_resp_header(conn, "x-frame-options")
+ end
+
+ @doc """
+ If the `route_token` query parameter matches the `route_token` setting, this plug does nothing.
+ Otherwise, it sends a 401 response.
+ """
+ def token_protected_route(%{query_params: %{"route_token" => route_token}} = conn, _opts) do
+ if Settings.get!(:route_token) == route_token do
+ conn
+ else
+ send_unauthorized(conn)
+ end
+ end
+
+ def token_protected_route(conn, _opts) do
+ send_unauthorized(conn)
+ end
+
+ defp credential_set?(credential) do
+ credential && credential != ""
+ end
+
+ defp send_unauthorized(conn) do
+ conn
+ |> send_resp(:unauthorized, "Unauthorized")
+ |> halt()
+ end
+end
diff --git a/lib/pinchflat_web/router.ex b/lib/pinchflat_web/router.ex
index 1e1ee7d..9586f13 100644
--- a/lib/pinchflat_web/router.ex
+++ b/lib/pinchflat_web/router.ex
@@ -1,5 +1,6 @@
defmodule PinchflatWeb.Router do
use PinchflatWeb, :router
+ import PinchflatWeb.Plugs
import Phoenix.LiveDashboard.Router
# IMPORTANT: `strip_trailing_extension` in endpoint.ex removes
@@ -19,8 +20,23 @@ defmodule PinchflatWeb.Router do
plug :accepts, ["json"]
end
- pipeline :feeds do
- plug :maybe_basic_auth
+ scope "/", PinchflatWeb do
+ pipe_through [:maybe_basic_auth, :token_protected_route]
+
+ # has to match before /sources/:id
+ get "/sources/opml", Podcasts.PodcastController, :opml_feed
+ end
+
+ # Routes in here _may not be_ protected by basic auth. This is necessary for
+ # media streaming to work for RSS podcast feeds.
+ scope "/", PinchflatWeb do
+ pipe_through :maybe_basic_auth
+
+ get "/sources/:uuid/feed", Podcasts.PodcastController, :rss_feed
+ get "/sources/:uuid/feed_image", Podcasts.PodcastController, :feed_image
+ get "/media/:uuid/episode_image", Podcasts.PodcastController, :episode_image
+
+ get "/media/:uuid/stream", MediaItems.MediaItemController, :stream
end
scope "/", PinchflatWeb do
@@ -40,6 +56,7 @@ defmodule PinchflatWeb.Router do
post "/force_redownload", Sources.SourceController, :force_redownload
post "/force_index", Sources.SourceController, :force_index
post "/force_metadata_refresh", Sources.SourceController, :force_metadata_refresh
+ post "/sync_files_on_disk", Sources.SourceController, :sync_files_on_disk
resources "/media", MediaItems.MediaItemController, only: [:show, :edit, :update, :delete] do
post "/force_download", MediaItems.MediaItemController, :force_download
@@ -47,23 +64,11 @@ defmodule PinchflatWeb.Router do
end
end
- # Routes in here _may not be_ protected by basic auth. This is necessary for
- # media streaming to work for RSS podcast feeds.
- scope "/", PinchflatWeb do
- pipe_through :feeds
-
- get "/sources/:uuid/feed", Podcasts.PodcastController, :rss_feed
- get "/sources/:uuid/feed_image", Podcasts.PodcastController, :feed_image
- get "/media/:uuid/episode_image", Podcasts.PodcastController, :episode_image
-
- get "/media/:uuid/stream", MediaItems.MediaItemController, :stream
- end
-
# No auth or CSRF protection for the health check endpoint
scope "/", PinchflatWeb do
pipe_through :api
- get "/healthcheck", HealthController, :check
+ get "/healthcheck", HealthController, :check, log: false
end
scope "/dev" do
@@ -73,31 +78,4 @@ defmodule PinchflatWeb.Router do
metrics: PinchflatWeb.Telemetry,
ecto_repos: [Pinchflat.Repo]
end
-
- defp maybe_basic_auth(conn, opts) do
- if Application.get_env(:pinchflat, :expose_feed_endpoints) do
- conn
- else
- basic_auth(conn, opts)
- end
- end
-
- defp basic_auth(conn, _opts) do
- username = Application.get_env(:pinchflat, :basic_auth_username)
- password = Application.get_env(:pinchflat, :basic_auth_password)
-
- if credential_set?(username) && credential_set?(password) do
- Plug.BasicAuth.basic_auth(conn, username: username, password: password, realm: "Pinchflat")
- else
- conn
- end
- end
-
- defp credential_set?(credential) do
- credential && credential != ""
- end
-
- defp allow_iframe_embed(conn, _opts) do
- delete_resp_header(conn, "x-frame-options")
- end
end
diff --git a/mix.exs b/mix.exs
index 81ff018..27e5f85 100644
--- a/mix.exs
+++ b/mix.exs
@@ -4,9 +4,10 @@ defmodule Pinchflat.MixProject do
def project do
[
app: :pinchflat,
- version: "2024.6.25",
+ version: "2025.9.26",
elixir: "~> 1.17",
elixirc_paths: elixirc_paths(Mix.env()),
+ elixirc_options: [warnings_as_errors: System.get_env("EX_CHECK") == "1"],
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
@@ -46,34 +47,36 @@ defmodule Pinchflat.MixProject do
# Type `mix help deps` for examples and options.
defp deps do
[
- {:phoenix, "~> 1.7.10"},
+ {:phoenix, "~> 1.7.21"},
{:phoenix_ecto, "~> 4.4"},
- {:ecto, "~> 3.11.2"},
- {:ecto_sql, "~> 3.10"},
+ {:ecto, "~> 3.12.3"},
+ {:ecto_sql, "~> 3.12"},
{:ecto_sqlite3, ">= 0.0.0"},
{:ecto_sqlite3_extras, "~> 1.2.0"},
- {:phoenix_html, "~> 3.3"},
+ {:phoenix_html, "~> 4.2"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
- {:phoenix_live_view, "~> 0.20.1"},
- {:floki, ">= 0.30.0", only: :test},
+ {:phoenix_live_view, "~> 1.0.0"},
+ {:floki, ">= 0.36.0", only: :test},
{:phoenix_live_dashboard, "~> 0.8.2"},
{:esbuild, "~> 0.8", runtime: Mix.env() == :dev},
{:tailwind, "~> 0.2.0", runtime: Mix.env() == :dev},
{:swoosh, "~> 1.3"},
- {:finch, "~> 0.13"},
- {:telemetry_metrics, "~> 0.6"},
- {:telemetry_poller, "~> 1.0"},
+ {:finch, "~> 0.18"},
+ {:telemetry_metrics, "~> 1.0"},
+ {:telemetry_poller, "~> 1.1"},
{:gettext, "~> 0.20"},
{:jason, "~> 1.2"},
- {:dns_cluster, "~> 0.1.1"},
+ {:dns_cluster, "~> 0.2"},
{:plug_cowboy, "~> 2.5"},
- {:oban, "~> 2.16"},
+ {:oban, "~> 2.17"},
{:nimble_parsec, "~> 1.4"},
- {:timex, "~> 3.0"},
+ # See: https://github.com/bitwalker/timex/issues/778
+ {:timex, git: "https://github.com/bitwalker/timex.git", ref: "cc649c7a586f1266b17d57aff3c6eb1a56116ca2"},
+ {:prom_ex, "~> 1.11.0"},
{:mox, "~> 1.0", only: :test},
{:credo, "~> 1.7.7", only: [:dev, :test], runtime: false},
{:credo_naming, "~> 2.1", only: [:dev, :test], runtime: false},
- {:ex_check, "~> 0.14.0", only: [:dev, :test], runtime: false},
+ {:ex_check, "~> 0.16.0", only: [:dev, :test], runtime: false},
{:faker, "~> 0.17", only: :test},
{:sobelow, "~> 0.13", only: [:dev, :test], runtime: false}
]
@@ -103,7 +106,8 @@ defmodule Pinchflat.MixProject do
"ecto.rollback": [
"ecto.rollback",
~s(cmd [ -z "$MIX_ENV" ] && yarn run create-erd || echo "No ERD generated")
- ]
+ ],
+ "version.bump": "cmd ./tooling/version_bump.sh"
]
end
end
diff --git a/mix.lock b/mix.lock
index 180ba25..c316d72 100644
--- a/mix.lock
+++ b/mix.lock
@@ -1,68 +1,73 @@
%{
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
- "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", [:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"},
- "cc_precompiler": {:hex, :cc_precompiler, "0.1.9", "e8d3364f310da6ce6463c3dd20cf90ae7bbecbf6c5203b98bf9b48035592649b", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "9dcab3d0f3038621f1601f13539e7a9ee99843862e66ad62827b0c42b2f58a54"},
- "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
+ "castore": {:hex, :castore, "1.0.14", "4582dd7d630b48cf5e1ca8d3d42494db51e406b7ba704e81fbd401866366896a", [:mix], [], "hexpm", "7bc1b65249d31701393edaaac18ec8398d8974d52c647b7904d01b964137b9f4"},
+ "cc_precompiler": {:hex, :cc_precompiler, "0.1.10", "47c9c08d8869cf09b41da36538f62bc1abd3e19e41701c2cea2675b53c704258", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "f6e046254e53cd6b41c6bacd70ae728011aa82b2742a80d6e2214855c6e06b22"},
+ "certifi": {:hex, :certifi, "2.15.0", "0e6e882fcdaaa0a5a9f2b3db55b1394dba07e8d6d9bcad08318fb604c6839712", [:rebar3], [], "hexpm", "b147ed22ce71d72eafdad94f055165c1c182f61a2ff49df28bcc71d1d5b94a60"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
- "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"},
+ "cowboy": {:hex, :cowboy, "2.13.0", "09d770dd5f6a22cc60c071f432cd7cb87776164527f205c5a6b0f24ff6b38990", [:make, :rebar3], [{:cowlib, ">= 2.14.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, ">= 1.8.0 and < 3.0.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "e724d3a70995025d654c1992c7b11dbfea95205c047d86ff9bf1cda92ddc5614"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
- "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"},
- "credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
+ "cowlib": {:hex, :cowlib, "2.15.0", "3c97a318a933962d1c12b96ab7c1d728267d2c523c25a5b57b0f93392b6e9e25", [:make, :rebar3], [], "hexpm", "4f00c879a64b4fe7c8fcb42a4281925e9ffdb928820b03c3ad325a617e857532"},
+ "credo": {:hex, :credo, "1.7.12", "9e3c20463de4b5f3f23721527fcaf16722ec815e70ff6c60b86412c695d426c1", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8493d45c656c5427d9c729235b99d498bd133421f3e0a683e5c1b561471291e5"},
"credo_naming": {:hex, :credo_naming, "2.1.0", "d44ad58890d4db552e141ce64756a74ac1573665af766d1ac64931aa90d47744", [:make, :mix], [{:credo, "~> 1.6", [hex: :credo, repo: "hexpm", optional: false]}], "hexpm", "830e23b3fba972e2fccec49c0c089fe78c1e64bc16782a2682d78082351a2909"},
- "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
- "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
- "dns_cluster": {:hex, :dns_cluster, "0.1.2", "3eb5be824c7888dadf9781018e1a5f1d3d1113b333c50bce90fb1b83df1015f2", [:mix], [], "hexpm", "7494272040f847637bbdb01bcdf4b871e82daf09b813e7d3cb3b84f112c6f2f8"},
- "ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"},
- "ecto_sql": {:hex, :ecto_sql, "3.11.1", "e9abf28ae27ef3916b43545f9578b4750956ccea444853606472089e7d169470", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ce14063ab3514424276e7e360108ad6c2308f6d88164a076aac8a387e1fea634"},
- "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.15.1", "40f2fbd9e246455f8c42e7e0a77009ef806caa1b3ce6f717b2a0a80e8432fcfd", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.11", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.19", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "28b16e177123c688948357176662bf9ff9084daddf950ef5b6baf3ee93707064"},
+ "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"},
+ "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"},
+ "dns_cluster": {:hex, :dns_cluster, "0.2.0", "aa8eb46e3bd0326bd67b84790c561733b25c5ba2fe3c7e36f28e88f384ebcb33", [:mix], [], "hexpm", "ba6f1893411c69c01b9e8e8f772062535a4cf70f3f35bcc964a324078d8c8240"},
+ "ecto": {:hex, :ecto, "3.12.5", "4a312960ce612e17337e7cefcf9be45b95a3be6b36b6f94dfb3d8c361d631866", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6eb18e80bef8bb57e17f5a7f068a1719fbda384d40fc37acb8eb8aeca493b6ea"},
+ "ecto_sql": {:hex, :ecto_sql, "3.12.1", "c0d0d60e85d9ff4631f12bafa454bc392ce8b9ec83531a412c12a0d415a3a4d0", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aff5b958a899762c5f09028c847569f7dfb9cc9d63bdb8133bff8a5546de6bf5"},
+ "ecto_sqlite3": {:hex, :ecto_sqlite3, "0.19.0", "00030bbaba150369ff3754bbc0d2c28858e8f528ae406bf6997d1772d3a03203", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.12", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:exqlite, "~> 0.22", [hex: :exqlite, repo: "hexpm", optional: false]}], "hexpm", "297b16750fe229f3056fe32afd3247de308094e8b0298aef0d73a8493ce97c81"},
"ecto_sqlite3_extras": {:hex, :ecto_sqlite3_extras, "1.2.2", "36e60b561a11441d15f26c791817999269fb578b985162207ebb08b04ca71e40", [:mix], [{:exqlite, ">= 0.13.2", [hex: :exqlite, repo: "hexpm", optional: false]}, {:table_rex, "~> 4.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "2b66ba7246bb4f7e39e2578acd4a0e4e4be54f60ff52d450a01be95eeb78ff1e"},
- "elixir_make": {:hex, :elixir_make, "0.7.8", "505026f266552ee5aabca0b9f9c229cbb496c689537c9f922f3eb5431157efc7", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "7a71945b913d37ea89b06966e1342c85cfe549b15e6d6d081e8081c493062c07"},
- "esbuild": {:hex, :esbuild, "0.8.1", "0cbf919f0eccb136d2eeef0df49c4acf55336de864e63594adcea3814f3edf41", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "25fc876a67c13cb0a776e7b5d7974851556baeda2085296c14ab48555ea7560f"},
- "ex_check": {:hex, :ex_check, "0.14.0", "d6fbe0bcc51cf38fea276f5bc2af0c9ae0a2bb059f602f8de88709421dae4f0e", [:mix], [], "hexpm", "8a602e98c66e6a4be3a639321f1f545292042f290f91fa942a285888c6868af0"},
- "expo": {:hex, :expo, "0.5.1", "249e826a897cac48f591deba863b26c16682b43711dd15ee86b92f25eafd96d9", [:mix], [], "hexpm", "68a4233b0658a3d12ee00d27d37d856b1ba48607e7ce20fd376958d0ba6ce92b"},
- "exqlite": {:hex, :exqlite, "0.19.0", "0f3ee29e35bed38552dd0ed59600aa81c78f867f5b5ff0e17d330148e0465483", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "55a8fbb0443f03d4a256e3458bd1203eff5037a6624b76460eaaa9080f462b06"},
- "faker": {:hex, :faker, "0.17.0", "671019d0652f63aefd8723b72167ecdb284baf7d47ad3a82a15e9b8a6df5d1fa", [:mix], [], "hexpm", "a7d4ad84a93fd25c5f5303510753789fc2433ff241bf3b4144d3f6f291658a6a"},
- "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
- "finch": {:hex, :finch, "0.17.0", "17d06e1d44d891d20dbd437335eebe844e2426a0cd7e3a3e220b461127c73f70", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8d014a661bb6a437263d4b5abf0bcbd3cf0deb26b1e8596f2a271d22e48934c7"},
- "floki": {:hex, :floki, "0.35.2", "87f8c75ed8654b9635b311774308b2760b47e9a579dabf2e4d5f1e1d42c39e0b", [:mix], [], "hexpm", "6b05289a8e9eac475f644f09c2e4ba7e19201fd002b89c28c1293e7bd16773d9"},
- "gettext": {:hex, :gettext, "0.24.0", "6f4d90ac5f3111673cbefc4ebee96fe5f37a114861ab8c7b7d5b30a1108ce6d8", [:mix], [{:expo, "~> 0.5.1", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "bdf75cdfcbe9e4622dd18e034b227d77dd17f0f133853a1c73b97b3d6c770e8b"},
- "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
- "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
+ "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"},
+ "esbuild": {:hex, :esbuild, "0.10.0", "b0aa3388a1c23e727c5a3e7427c932d89ee791746b0081bbe56103e9ef3d291f", [:mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "468489cda427b974a7cc9f03ace55368a83e1a7be12fba7e30969af78e5f8c70"},
+ "ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"},
+ "expo": {:hex, :expo, "1.1.0", "f7b9ed7fb5745ebe1eeedf3d6f29226c5dd52897ac67c0f8af62a07e661e5c75", [:mix], [], "hexpm", "fbadf93f4700fb44c331362177bdca9eeb8097e8b0ef525c9cc501cb9917c960"},
+ "exqlite": {:hex, :exqlite, "0.31.0", "bdf87c618861147382cee29eb8bd91d8cfb0949f89238b353d24fa331527a33a", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "df352de99ba4ce1bac2ad4943d09dbe9ad59e0e7ace55917b493ae289c78fc75"},
+ "faker": {:hex, :faker, "0.18.0", "943e479319a22ea4e8e39e8e076b81c02827d9302f3d32726c5bf82f430e6e14", [:mix], [], "hexpm", "bfbdd83958d78e2788e99ec9317c4816e651ad05e24cfd1196ce5db5b3e81797"},
+ "file_system": {:hex, :file_system, "1.1.0", "08d232062284546c6c34426997dd7ef6ec9f8bbd090eb91780283c9016840e8f", [:mix], [], "hexpm", "bfcf81244f416871f2a2e15c1b515287faa5db9c6bcf290222206d120b3d43f6"},
+ "finch": {:hex, :finch, "0.19.0", "c644641491ea854fc5c1bbaef36bfc764e3f08e7185e1f084e35e0672241b76d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fc5324ce209125d1e2fa0fcd2634601c52a787aff1cd33ee833664a5af4ea2b6"},
+ "floki": {:hex, :floki, "0.37.1", "d7aaee758c8a5b4a7495799a4260754fec5530d95b9c383c03b27359dea117cf", [:mix], [], "hexpm", "673d040cb594d31318d514590246b6dd587ed341d3b67e17c1c0eb8ce7ca6f04"},
+ "gettext": {:hex, :gettext, "0.26.2", "5978aa7b21fada6deabf1f6341ddba50bc69c999e812211903b169799208f2a8", [:mix], [{:expo, "~> 0.5.1 or ~> 1.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "aa978504bcf76511efdc22d580ba08e2279caab1066b76bb9aa81c4a1e0a32a5"},
+ "hackney": {:hex, :hackney, "1.24.1", "f5205a125bba6ed4587f9db3cc7c729d11316fa8f215d3e57ed1c067a9703fa9", [:rebar3], [{:certifi, "~> 2.15.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.4", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "f4a7392a0b53d8bbc3eb855bdcc919cd677358e65b2afd3840b5b3690c4c8a39"},
+ "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"},
"idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
- "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
+ "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
- "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"},
- "mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"},
- "mint": {:hex, :mint, "1.5.2", "4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"},
- "mox": {:hex, :mox, "1.1.0", "0f5e399649ce9ab7602f72e718305c0f9cdc351190f72844599545e4996af73c", [:mix], [], "hexpm", "d44474c50be02d5b72131070281a5d3895c0e7a95c780e90bc0cfe712f633a13"},
- "nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"},
- "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
- "nimble_pool": {:hex, :nimble_pool, "1.0.0", "5eb82705d138f4dd4423f69ceb19ac667b3b492ae570c9f5c900bb3d2f50a847", [:mix], [], "hexpm", "80be3b882d2d351882256087078e1b1952a28bf98d0a287be87e4a24a710b67a"},
- "oban": {:hex, :oban, "2.17.3", "ddfd5710aadcd550d2e174c8d73ce5f1865601418cf54a91775f20443fb832b7", [:mix], [{:ecto_sql, "~> 3.6", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "452eada8bfe0d0fefd0740ab5fa8cf3ef6c375df0b4a3c3805d179022a04738a"},
+ "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"},
+ "mimerl": {:hex, :mimerl, "1.4.0", "3882a5ca67fbbe7117ba8947f27643557adec38fa2307490c4c4207624cb213b", [:rebar3], [], "hexpm", "13af15f9f68c65884ecca3a3891d50a7b57d82152792f3e19d88650aa126b144"},
+ "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"},
+ "mox": {:hex, :mox, "1.2.0", "a2cd96b4b80a3883e3100a221e8adc1b98e4c3a332a8fc434c39526babafd5b3", [:mix], [{:nimble_ownership, "~> 1.0", [hex: :nimble_ownership, repo: "hexpm", optional: false]}], "hexpm", "c7b92b3cc69ee24a7eeeaf944cd7be22013c52fcb580c1f33f50845ec821089a"},
+ "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
+ "nimble_ownership": {:hex, :nimble_ownership, "1.0.1", "f69fae0cdd451b1614364013544e66e4f5d25f36a2056a9698b793305c5aa3a6", [:mix], [], "hexpm", "3825e461025464f519f3f3e4a1f9b68c47dc151369611629ad08b636b73bb22d"},
+ "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"},
+ "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
+ "oban": {:hex, :oban, "2.19.4", "045adb10db1161dceb75c254782f97cdc6596e7044af456a59decb6d06da73c1", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:igniter, "~> 0.5", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5fcc6219e6464525b808d97add17896e724131f498444a292071bf8991c99f97"},
+ "octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"},
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
- "phoenix": {:hex, :phoenix, "1.7.10", "02189140a61b2ce85bb633a9b6fd02dff705a5f1596869547aeb2b2b95edd729", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "cf784932e010fd736d656d7fead6a584a4498efefe5b8227e9f383bf15bb79d0"},
- "phoenix_ecto": {:hex, :phoenix_ecto, "4.4.3", "86e9878f833829c3f66da03d75254c155d91d72a201eb56ae83482328dc7ca93", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d36c401206f3011fefd63d04e8ef626ec8791975d9d107f9a0817d426f61ac07"},
- "phoenix_html": {:hex, :phoenix_html, "3.3.3", "380b8fb45912b5638d2f1d925a3771b4516b9a78587249cabe394e0a5d579dc9", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "923ebe6fec6e2e3b3e569dfbdc6560de932cd54b000ada0208b5f45024bdd76c"},
- "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.3", "7ff51c9b6609470f681fbea20578dede0e548302b0c8bdf338b5a753a4f045bf", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "f9470a0a8bae4f56430a23d42f977b5a6205fdba6559d76f932b876bfaec652d"},
- "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.4.1", "2aff698f5e47369decde4357ba91fc9c37c6487a512b41732818f2204a8ef1d3", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "9bffb834e7ddf08467fe54ae58b5785507aaba6255568ae22b4d46e2bb3615ab"},
- "phoenix_live_view": {:hex, :phoenix_live_view, "0.20.3", "8b6406bc0a451f295407d7acff7f234a6314be5bbe0b3f90ed82b07f50049878", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a8e4385e05618b424779f894ed2df97d3c7518b7285fcd11979077ae6226466b"},
+ "peep": {:hex, :peep, "3.4.1", "0e5263710fa0b42675bd0a11fdcdd3ee4f484e319105b6ad9a576c91a5d3cb55", [:mix], [{:nimble_options, "~> 1.1", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:plug, "~> 1.16", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry_metrics, "~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "7a9b8c1f17b8b9475efb27b7048afa4d89ab84ef33a3d1df13696c85c12cd632"},
+ "phoenix": {:hex, :phoenix, "1.7.21", "14ca4f1071a5f65121217d6b57ac5712d1857e40a0833aff7a691b7870fc9a3b", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "336dce4f86cba56fed312a7d280bf2282c720abb6074bdb1b61ec8095bdd0bc9"},
+ "phoenix_ecto": {:hex, :phoenix_ecto, "4.6.4", "dcf3483ab45bab4c15e3a47c34451392f64e433846b08469f5d16c2a4cd70052", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "f5b8584c36ccc9b903948a696fc9b8b81102c79c7c0c751a9f00cdec55d5f2d7"},
+ "phoenix_html": {:hex, :phoenix_html, "4.2.1", "35279e2a39140068fc03f8874408d58eef734e488fc142153f055c5454fd1c08", [:mix], [], "hexpm", "cff108100ae2715dd959ae8f2a8cef8e20b593f8dfd031c9cba92702cf23e053"},
+ "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.7", "405880012cb4b706f26dd1c6349125bfc903fb9e44d1ea668adaf4e04d4884b7", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "3a8625cab39ec261d48a13b7468dc619c0ede099601b084e343968309bd4d7d7"},
+ "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.0", "2791fac0e2776b640192308cc90c0dbcf67843ad51387ed4ecae2038263d708d", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "b3a1fa036d7eb2f956774eda7a7638cf5123f8f2175aca6d6420a7f95e598e1c"},
+ "phoenix_live_view": {:hex, :phoenix_live_view, "1.0.17", "beeb16d83a7d3760f7ad463df94e83b087577665d2acc0bf2987cd7d9778068f", [:mix], [{:floki, "~> 0.36", [hex: :floki, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0 or ~> 1.8.0-rc", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a4ca05c1eb6922c4d07a508a75bfa12c45e5f4d8f77ae83283465f02c53741e1"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
"phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
- "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"},
- "plug_cowboy": {:hex, :plug_cowboy, "2.6.1", "9a3bbfceeb65eff5f39dab529e5cd79137ac36e913c02067dba3963a26efe9b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "de36e1a21f451a18b790f37765db198075c25875c64834bcc82d90b309eb6613"},
- "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"},
- "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
- "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"},
+ "plug": {:hex, :plug, "1.18.0", "d78df36c41f7e798f2edf1f33e1727eae438e9dd5d809a9997c463a108244042", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "819f9e176d51e44dc38132e132fe0accaf6767eab7f0303431e404da8476cfa2"},
+ "plug_cowboy": {:hex, :plug_cowboy, "2.7.3", "1304d36752e8bdde213cea59ef424ca932910a91a07ef9f3874be709c4ddb94b", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "77c95524b2aa5364b247fa17089029e73b951ebc1adeef429361eab0bb55819d"},
+ "plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"},
+ "prom_ex": {:hex, :prom_ex, "1.11.0", "1f6d67f2dead92224cb4f59beb3e4d319257c5728d9638b4a5e8ceb51a4f9c7e", [:mix], [{:absinthe, ">= 1.7.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.1.0", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.11.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.10.0", [hex: :oban, repo: "hexpm", optional: true]}, {:octo_fetch, "~> 0.4", [hex: :octo_fetch, repo: "hexpm", optional: false]}, {:peep, "~> 3.0", [hex: :peep, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.7.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.20.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.16.0", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 2.6.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, ">= 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.2", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.1", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "76b074bc3730f0802978a7eb5c7091a65473eaaf07e99ec9e933138dcc327805"},
+ "ranch": {:hex, :ranch, "2.2.0", "25528f82bc8d7c6152c57666ca99ec716510fe0925cb188172f41ce93117b1b0", [:make, :rebar3], [], "hexpm", "fa0b99a1780c80218a4197a59ea8d3bdae32fbff7e88527d7d8a4787eff4f8e7"},
+ "sobelow": {:hex, :sobelow, "0.14.0", "dd82aae8f72503f924fe9dd97ffe4ca694d2f17ec463dcfd365987c9752af6ee", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "7ecf91e298acfd9b24f5d761f19e8f6e6ac585b9387fb6301023f1f2cd5eed5f"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
- "swoosh": {:hex, :swoosh, "1.14.4", "94e9dba91f7695a10f49b0172c4a4cb658ef24abef7e8140394521b7f3bbb2d4", [:mix], [{:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.4 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "081c5a590e4ba85cc89baddf7b2beecf6c13f7f84a958f1cd969290815f0f026"},
- "table_rex": {:hex, :table_rex, "4.0.0", "3c613a68ebdc6d4d1e731bc973c233500974ec3993c99fcdabb210407b90959b", [:mix], [], "hexpm", "c35c4d5612ca49ebb0344ea10387da4d2afe278387d4019e4d8111e815df8f55"},
+ "swoosh": {:hex, :swoosh, "1.19.1", "77e839b27fc7af0704788e5854934c77d4dea7b437270c924a717513d598b8a4", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.3", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5.10 or ~> 0.6 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "eab57462d41a3330e82cb93a9d7640f5c79a85951f3457db25c1eb28fda193a6"},
+ "table_rex": {:hex, :table_rex, "4.1.0", "fbaa8b1ce154c9772012bf445bfb86b587430fb96f3b12022d3f35ee4a68c918", [:mix], [], "hexpm", "95932701df195d43bc2d1c6531178fc8338aa8f38c80f098504d529c43bc2601"},
"tailwind": {:hex, :tailwind, "0.2.2", "9e27288b568ede1d88517e8c61259bc214a12d7eed271e102db4c93fcca9b2cd", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}], "hexpm", "ccfb5025179ea307f7f899d1bb3905cd0ac9f687ed77feebc8f67bdca78565c4"},
- "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
- "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.2", "2caabe9344ec17eafe5403304771c3539f3b6e2f7fb6a6f602558c825d0d0bfb", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9b43db0dc33863930b9ef9d27137e78974756f5f198cae18409970ed6fa5b561"},
- "telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
- "timex": {:hex, :timex, "3.7.11", "bb95cb4eb1d06e27346325de506bcc6c30f9c6dea40d1ebe390b262fad1862d1", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.20", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.1", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8b9024f7efbabaf9bd7aa04f65cf8dcd7c9818ca5737677c7b76acbc6a94d1aa"},
- "tzdata": {:hex, :tzdata, "1.1.1", "20c8043476dfda8504952d00adac41c6eda23912278add38edc140ae0c5bcc46", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a69cec8352eafcd2e198dea28a34113b60fdc6cb57eb5ad65c10292a6ba89787"},
- "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
+ "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"},
+ "telemetry_metrics": {:hex, :telemetry_metrics, "1.1.0", "5bd5f3b5637e0abea0426b947e3ce5dd304f8b3bc6617039e2b5a008adc02f8f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e7b79e8ddfde70adb6db8a6623d1778ec66401f366e9a8f5dd0955c56bc8ce67"},
+ "telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.2.1", "c9755987d7b959b557084e6990990cb96a50d6482c683fb9622a63837f3cd3d8", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "5e2c599da4983c4f88a33e9571f1458bf98b0cf6ba930f1dc3a6e8cf45d5afb6"},
+ "telemetry_poller": {:hex, :telemetry_poller, "1.2.0", "ba82e333215aed9dd2096f93bd1d13ae89d249f82760fcada0850ba33bac154b", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7216e21a6c326eb9aa44328028c34e9fd348fb53667ca837be59d0aa2a0156e8"},
+ "timex": {:git, "https://github.com/bitwalker/timex.git", "cc649c7a586f1266b17d57aff3c6eb1a56116ca2", [ref: "cc649c7a586f1266b17d57aff3c6eb1a56116ca2"]},
+ "tzdata": {:hex, :tzdata, "1.1.3", "b1cef7bb6de1de90d4ddc25d33892b32830f907e7fc2fccd1e7e22778ab7dfbc", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "d4ca85575a064d29d4e94253ee95912edfb165938743dbf002acdf0dcecb0c28"},
+ "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.1", "a48703a25c170eedadca83b11e88985af08d35f37c6f664d6dcfb106a97782fc", [:rebar3], [], "hexpm", "b3a917854ce3ae233619744ad1e0102e05673136776fb2fa76234f3e03b23642"},
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
- "websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"},
+ "websock_adapter": {:hex, :websock_adapter, "0.5.8", "3b97dc94e407e2d1fc666b2fb9acf6be81a1798a2602294aac000260a7c4a47d", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "315b9a1865552212b5f35140ad194e67ce31af45bcee443d4ecb96b5fd3f3782"},
}
diff --git a/package.json b/package.json
index fab6aa5..05dfe91 100644
--- a/package.json
+++ b/package.json
@@ -6,8 +6,8 @@
},
"scripts": {
"create-erd": "sqleton -o priv/repo/erd.png priv/repo/pinchflat_dev.db",
- "lint:check": "prettier . --check --config=tooling/.prettierrc.js --ignore-path=tooling/.prettierignore --ignore-path=.gitignore",
- "lint:fix": "prettier . --write --config=tooling/.prettierrc.js --ignore-path=tooling/.prettierignore --ignore-path=.gitignore"
+ "lint:check": "prettier . --check --config=.prettierrc.js --ignore-path=.prettierignore --ignore-path=.gitignore",
+ "lint:fix": "prettier . --write --config=.prettierrc.js --ignore-path=.prettierignore --ignore-path=.gitignore"
},
"private": true
}
diff --git a/priv/grafana/application.json b/priv/grafana/application.json
new file mode 100644
index 0000000..1cc05d4
--- /dev/null
+++ b/priv/grafana/application.json
@@ -0,0 +1,607 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx Application plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/application.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Application Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.Application.html"
+ }
+ ],
+ "panels": [
+ {
+ "datasource": "prometheus",
+ "description": "The amount of time that the application has been running.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "dtdurationms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 0
+ },
+ "id": 6,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_uptime_milliseconds_count{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Uptime",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The data is populated by the PromEx Application plugin and provides information regarding your application's dependencies.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": "left",
+ "displayMode": "auto"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Status"
+ },
+ "properties": [
+ {
+ "id": "custom.displayMode",
+ "value": "color-background"
+ },
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "Started",
+ "to": "",
+ "type": 1,
+ "value": "1"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "Loaded",
+ "to": "",
+ "type": 1,
+ "value": "0"
+ }
+ ]
+ },
+ {
+ "id": "custom.align",
+ "value": "center"
+ },
+ {
+ "id": "custom.width",
+ "value": 202
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Name"
+ },
+ "properties": [
+ {
+ "id": "custom.width",
+ "value": 349
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Version"
+ },
+ "properties": [
+ {
+ "id": "custom.width",
+ "value": 187
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 36,
+ "w": 16,
+ "x": 8,
+ "y": 0
+ },
+ "id": 2,
+ "options": {
+ "showHeader": true,
+ "sortBy": [
+ {
+ "desc": false,
+ "displayName": "Name"
+ }
+ ]
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_dependency_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Dependency Information",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "__name__": true,
+ "instance": true,
+ "job": true,
+ "Value": true
+ },
+ "indexByName": {
+ "Time": 0,
+ "Value": 4,
+ "__name__": 1,
+ "instance": 2,
+ "job": 3,
+ "modules": 7,
+ "name": 5,
+ "version": 6
+ },
+ "renameByName": {
+ "Value": "Status",
+ "modules": "Number of Modules Loaded",
+ "name": "Name",
+ "version": "Version"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The name of the primary application that is running.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 6
+ },
+ "id": 11,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^name$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_primary_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Name",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The Git SHA of the application.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 12
+ },
+ "id": 10,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^sha$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_git_sha_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Git SHA",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The author of the application's last Git commit.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 18
+ },
+ "id": 12,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^author$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_git_author_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Git Author",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The version of the primary application that is running.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 24
+ },
+ "id": 7,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^version$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_primary_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Version",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of modules loaded by the primary application that is running.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 0,
+ "y": 30
+ },
+ "id": 9,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^modules$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_application_primary_info{job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Application Modules Loaded",
+ "type": "stat"
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "Application", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx Application Dashboard",
+ "uid": "7DBBC471C5775585391E8F24D1E62319",
+ "version": 1
+}
diff --git a/priv/grafana/beam.json b/priv/grafana/beam.json
new file mode 100644
index 0000000..d35d0f0
--- /dev/null
+++ b/priv/grafana/beam.json
@@ -0,0 +1,2328 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx BEAM plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/beam.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "BEAM Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.Beam.html"
+ }
+ ],
+ "panels": [
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 25,
+ "panels": [],
+ "title": "Overview",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of time that has passed since the system has started.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "dtdurationms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 6,
+ "x": 0,
+ "y": 1
+ },
+ "id": 32,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_uptime_milliseconds_count{job=\"$job\", instance=\"$instance\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Uptime",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "Shows the various options that OTP was compiled with.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "Enabled",
+ "to": "",
+ "type": 1,
+ "value": "1"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "Disabled",
+ "to": "",
+ "type": 1,
+ "value": "0"
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "dark-red",
+ "value": null
+ },
+ {
+ "color": "dark-green",
+ "value": 1
+ }
+ ]
+ }
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Status"
+ },
+ "properties": [
+ {
+ "id": "custom.align",
+ "value": "center"
+ },
+ {
+ "id": "custom.displayMode",
+ "value": "color-background"
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Compiler Option"
+ },
+ "properties": [
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "SMP Support",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_smp_support_info"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "Thread Support",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_thread_support_info"
+ },
+ {
+ "from": "",
+ "id": 2,
+ "text": "Time Correction Support",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_time_correction_support_info"
+ },
+ {
+ "from": "",
+ "id": 3,
+ "text": "JIT Support",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_jit_support_info"
+ }
+ ]
+ },
+ {
+ "id": "custom.width",
+ "value": 219
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 6,
+ "x": 6,
+ "y": 1
+ },
+ "id": 10,
+ "options": {
+ "frameIndex": 0,
+ "showHeader": true,
+ "sortBy": []
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "{__name__=~\"pinchflat_prom_ex_beam_system_thread_support_info|pinchflat_prom_ex_beam_system_jit_support_info|pinchflat_prom_ex_beam_system_smp_support_info|pinchflat_prom_ex_beam_system_time_correction_support_info\", job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "OTP Compiler Options",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "instance": true,
+ "job": true
+ },
+ "indexByName": {
+ "Time": 0,
+ "Value": 4,
+ "__name__": 3,
+ "instance": 1,
+ "job": 2
+ },
+ "renameByName": {
+ "Value": "Status",
+ "__name__": "Compiler Option"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The OTP major version running the application.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 1
+ },
+ "id": 4,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_system_version_info{job=\"$job\", instance=\"$instance\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "OTP Major Version",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of running BEAM processes currently running.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 25000
+ },
+ {
+ "color": "red",
+ "value": 75000
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 1
+ },
+ "id": 28,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_process_count{job=\"$job\", instance=\"$instance\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Process Count",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "Shows information regarding the schedulers configured for the BEAM instance.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "Enabled",
+ "to": "",
+ "type": 1,
+ "value": "1"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "Disabled",
+ "to": "",
+ "type": 1,
+ "value": "0"
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Value"
+ },
+ "properties": [
+ {
+ "id": "custom.align",
+ "value": "center"
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Type"
+ },
+ "properties": [
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "Dirty CPU Schedulers",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_dirty_cpu_schedulers_info"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "Dirty CPU Schedulers Online",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_dirty_cpu_schedulers_online_info"
+ },
+ {
+ "from": "",
+ "id": 2,
+ "text": "Dirty IO Schedulers",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_dirty_io_schedulers_info"
+ },
+ {
+ "from": "",
+ "id": 3,
+ "text": "System Schedulers",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_schedulers_info"
+ },
+ {
+ "from": "",
+ "id": 4,
+ "text": "System Schedulers Online",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_schedulers_online_info"
+ },
+ {
+ "from": "",
+ "id": 5,
+ "text": "Word Size in Bytes",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_word_size_bytes_info"
+ },
+ {
+ "from": "",
+ "id": 6,
+ "text": "Logical Processors",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_logical_processors_info"
+ },
+ {
+ "from": "",
+ "id": 7,
+ "text": "Logical Processors Available",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_logical_processors_available_info"
+ },
+ {
+ "from": "",
+ "id": 8,
+ "text": "Logical Processors Online",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_logical_processors_online_info"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 11,
+ "w": 6,
+ "x": 0,
+ "y": 5
+ },
+ "id": 11,
+ "options": {
+ "frameIndex": 0,
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "{__name__=~\"pinchflat_prom_ex_beam_system_dirty_cpu_schedulers_info|pinchflat_prom_ex_beam_system_dirty_cpu_schedulers_online_info|pinchflat_prom_ex_beam_system_dirty_io_schedulers_info|pinchflat_prom_ex_beam_system_schedulers_info|pinchflat_prom_ex_beam_system_schedulers_online_info|pinchflat_prom_ex_beam_system_word_size_bytes_info|pinchflat_prom_ex_beam_system_logical_processors_info|pinchflat_prom_ex_beam_system_logical_processors_available_info|pinchflat_prom_ex_beam_system_logical_processors_online_info\", job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "System Information",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "instance": true,
+ "job": true
+ },
+ "indexByName": {
+ "Time": 0,
+ "Value": 4,
+ "__name__": 3,
+ "instance": 1,
+ "job": 2
+ },
+ "renameByName": {
+ "Value": "Value",
+ "__name__": "Type"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of currently active ports.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 25000
+ },
+ {
+ "color": "red",
+ "value": 75000
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 6
+ },
+ "id": 29,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_port_count{job=\"$job\", instance=\"$instance\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Port Count",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of currently allocated ETS tables.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 2500
+ },
+ {
+ "color": "red",
+ "value": 5000
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 6
+ },
+ "id": 31,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_ets_count{job=\"$job\", instance=\"$instance\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "ETS Table Count",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "Shows information regarding the system limits of the BEAM instance.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Value"
+ },
+ "properties": [
+ {
+ "id": "custom.align",
+ "value": "center"
+ },
+ {
+ "id": "unit",
+ "value": "locale"
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Type"
+ },
+ "properties": [
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "Atom Limit",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_atom_limit_info"
+ },
+ {
+ "from": "",
+ "id": 1,
+ "text": "ETS Table Limit",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_ets_limit_info"
+ },
+ {
+ "from": "",
+ "id": 2,
+ "text": "Port Limit",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_port_limit_info"
+ },
+ {
+ "from": "",
+ "id": 3,
+ "text": "Process Limit",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_process_limit_info"
+ },
+ {
+ "from": "",
+ "id": 4,
+ "text": "Thread Pool Size",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_beam_system_thread_pool_size_info"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 7,
+ "w": 6,
+ "x": 6,
+ "y": 9
+ },
+ "id": 12,
+ "options": {
+ "frameIndex": 0,
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "{__name__=~\"pinchflat_prom_ex_beam_system_ets_limit_info|pinchflat_prom_ex_beam_system_port_limit_info|pinchflat_prom_ex_beam_system_process_limit_info|pinchflat_prom_ex_beam_system_atom_limit_info|pinchflat_prom_ex_beam_system_thread_pool_size_info\", job=\"$job\", instance=\"$instance\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "System Limits",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "instance": true,
+ "job": true
+ },
+ "indexByName": {
+ "Time": 0,
+ "Value": 4,
+ "__name__": 3,
+ "instance": 1,
+ "job": 2
+ },
+ "renameByName": {
+ "Value": "Value",
+ "__name__": "Type"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of memory currently allocated by the BEAM.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 500000000
+ },
+ {
+ "color": "red",
+ "value": 1000000000
+ }
+ ]
+ },
+ "unit": "bytes"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 11
+ },
+ "id": 27,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_allocated_bytes{job=\"$job\", instance=\"$instance\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Memory Usage",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of atoms currently in the atom table.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 150000
+ },
+ {
+ "color": "red",
+ "value": 300000
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 11
+ },
+ "id": 30,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_atom_count{job=\"$job\", instance=\"$instance\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Atom Count",
+ "type": "stat"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 16
+ },
+ "id": 17,
+ "panels": [],
+ "title": "Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "Stats on the current memory usage.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "bytes"
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 0,
+ "y": 17
+ },
+ "hiddenSeries": false,
+ "id": 21,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_allocated_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Total Usage",
+ "refId": "A"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_atom_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Atoms",
+ "refId": "B"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_binary_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Binaries",
+ "refId": "C"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_code_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Code",
+ "refId": "D"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_ets_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "ETS",
+ "refId": "E"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_processes_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Processes",
+ "refId": "F"
+ },
+ {
+ "expr": "pinchflat_prom_ex_beam_memory_persistent_term_total_bytes{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Persistent Term",
+ "refId": "G"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Allocated Memory",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "bytes",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of garbage collection events that are occurring and the number of bytes reclaimed",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 12,
+ "y": 17
+ },
+ "hiddenSeries": false,
+ "id": 34,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [
+ {
+ "alias": "Garbage Collections",
+ "yaxis": 1
+ },
+ {
+ "alias": "Bytes Reclaimed",
+ "yaxis": 2
+ }
+ ],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_beam_stats_gc_count{job=\"$job\", instance=\"$instance\"}[$interval])",
+ "interval": "",
+ "legendFormat": "Garbage Collections",
+ "refId": "A"
+ },
+ {
+ "expr": "irate(pinchflat_prom_ex_beam_stats_gc_reclaimed_bytes{job=\"$job\", instance=\"$instance\"}[$interval])",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "Bytes Reclaimed",
+ "refId": "B"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Garbage Collection",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Garbage Collections",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "bytes",
+ "label": "Reclaimed Bytes",
+ "logBase": 2,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "Statistics regarding the normal scheduler.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 0,
+ "y": 32
+ },
+ "hiddenSeries": false,
+ "id": 36,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_active_task_count{job=\"$job\", instance=\"$instance\", type=\"normal\"}",
+ "interval": "",
+ "legendFormat": "Normal Tasks",
+ "refId": "A"
+ },
+ {
+ "expr": "0 - pinchflat_prom_ex_beam_stats_run_queue_count{job=\"$job\", instance=\"$instance\", type=\"normal\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "Normal Run Queue",
+ "refId": "B"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Normal Scheduler Status",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Normal Run Queue (-) / Normal Tasks (+)",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "locale",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": true,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "Statistics regarding the dirty schedulers.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 12,
+ "y": 32
+ },
+ "hiddenSeries": false,
+ "id": 37,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_active_task_count{job=\"$job\", instance=\"$instance\", type=\"dirty\"}",
+ "interval": "",
+ "legendFormat": "Dirty Tasks",
+ "refId": "C"
+ },
+ {
+ "expr": "0 - pinchflat_prom_ex_beam_stats_run_queue_count{job=\"$job\", instance=\"$instance\", type=\"dirty\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "Dirty Run Queue",
+ "refId": "D"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Dirty Scheduler Status",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Dirty Run Queue (-) / Dirty Tasks (+)",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "locale",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": true,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of context switches that have occurred along with the number of reductions.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 0,
+ "y": 47
+ },
+ "hiddenSeries": false,
+ "id": 35,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [
+ {
+ "alias": "Reductions",
+ "yaxis": 1
+ },
+ {
+ "alias": "Context Switches",
+ "yaxis": 2
+ }
+ ],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_beam_stats_reduction_count{job=\"$job\", instance=\"$instance\"}[$interval]) / 1000000",
+ "interval": "",
+ "legendFormat": "Reductions",
+ "refId": "A"
+ },
+ {
+ "expr": "irate(pinchflat_prom_ex_beam_stats_context_switch_count{job=\"$job\", instance=\"$instance\"}[$interval])",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "Context Switches",
+ "refId": "B"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "VM Load",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Reduction Count (in Millions)",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "locale",
+ "label": "Context Switch Count",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of running BEAM processes.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 12,
+ "y": 47
+ },
+ "hiddenSeries": false,
+ "id": 19,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_process_count{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Active Processes",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Active BEAM Processes",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Processes",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of atoms currently in the atom table",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 0,
+ "y": 62
+ },
+ "hiddenSeries": false,
+ "id": 22,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_atom_count{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Atoms",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Atom Table Size",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Atoms",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of bytes sent and received through ports.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "Bps"
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 12,
+ "y": 62
+ },
+ "hiddenSeries": false,
+ "id": 33,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "0 - irate(pinchflat_prom_ex_beam_stats_port_io_byte_count{job=\"$job\", instance=\"$instance\", type=\"input\"}[$interval])",
+ "interval": "",
+ "legendFormat": "Data Received",
+ "refId": "A"
+ },
+ {
+ "expr": "irate(pinchflat_prom_ex_beam_stats_port_io_byte_count{job=\"$job\", instance=\"$instance\", type=\"output\"}[$interval])",
+ "interval": "",
+ "legendFormat": "Data Sent",
+ "refId": "B"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Port IO",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "Bps",
+ "label": "Data Received (-) / Data Sent (+)",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of active ETS tables.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 0,
+ "y": 77
+ },
+ "hiddenSeries": false,
+ "id": 23,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_ets_count{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Active ETS Tables",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "ETS Tables",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Tables",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of active ports.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [
+ {
+ "from": "",
+ "id": 0,
+ "text": "",
+ "to": "",
+ "type": 1
+ }
+ ],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 15,
+ "w": 12,
+ "x": 12,
+ "y": 77
+ },
+ "hiddenSeries": false,
+ "id": 20,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_beam_stats_port_count{job=\"$job\", instance=\"$instance\"}",
+ "interval": "",
+ "legendFormat": "Active Ports",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Active Ports",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Ports",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "BEAM", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": false,
+ "text": "30s",
+ "value": "30s"
+ },
+ "hide": 0,
+ "label": "Interval",
+ "name": "interval",
+ "options": [
+ {
+ "selected": false,
+ "text": "15s",
+ "value": "15s"
+ },
+ {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "15m",
+ "value": "15m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "15s, 30s, 1m, 5m, 15m, 30m, 1h",
+ "queryValue": "",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx Beam Dashboard",
+ "uid": "14B578642B07F5DEA133D4DE6A6AAD0A",
+ "version": 1
+}
diff --git a/priv/grafana/ecto.json b/priv/grafana/ecto.json
new file mode 100644
index 0000000..dc60a69
--- /dev/null
+++ b/priv/grafana/ecto.json
@@ -0,0 +1,1247 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx Ecto plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/ecto.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Ecto Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.Ecto.html"
+ }
+ ],
+ "panels": [
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 19,
+ "panels": [],
+ "title": "Overview",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The time the connection spent waiting before being checked out for the query.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 6,
+ "x": 0,
+ "y": 1
+ },
+ "id": 24,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_query_idle_time_milliseconds_sum{instance=\"$instance\", job=\"$job\", repo=\"$repo\"} / pinchflat_prom_ex_ecto_repo_query_idle_time_milliseconds_count{instance=\"$instance\", job=\"$job\", repo=\"$repo\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Connection Idle Time (Average)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The time spent waiting to check out a database connection.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 6,
+ "x": 6,
+ "y": 1
+ },
+ "id": 29,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_query_queue_time_milliseconds_sum{instance=\"$instance\", job=\"$job\", repo=\"$repo\"} / pinchflat_prom_ex_ecto_repo_query_queue_time_milliseconds_count{instance=\"$instance\", job=\"$job\", repo=\"$repo\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Connection Queue Time (Average)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The time spent decoding the data received from the database.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 6,
+ "x": 12,
+ "y": 1
+ },
+ "id": 30,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_query_decode_time_milliseconds_sum{instance=\"$instance\", job=\"$job\", repo=\"$repo\"} / pinchflat_prom_ex_ecto_repo_query_decode_time_milliseconds_count{instance=\"$instance\", job=\"$job\", repo=\"$repo\"}",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Connection Decode Time (Average)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The time spent executing the query. This value is the average time across all query types (SELECT, DELETE, etc).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 6,
+ "x": 18,
+ "y": 1
+ },
+ "id": 31,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_sum{instance=\"$instance\", job=\"$job\", repo=\"$repo\"}) / sum(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_count{instance=\"$instance\", job=\"$job\", repo=\"$repo\"})",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Connection Execution Time (Average)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The name of the Ecto Repo module",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 3,
+ "x": 0,
+ "y": 9
+ },
+ "id": 21,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^repo$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_init_status_info{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Ecto Repo",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The host of the database that the Repo is connected to.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 3,
+ "x": 3,
+ "y": 9
+ },
+ "id": 26,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^database_host$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_init_status_info{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Database Host",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The name of the database that the Repo is connected to.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 3,
+ "x": 6,
+ "y": 9
+ },
+ "id": 25,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^database_name$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_init_status_info{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Database Name",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The pool size that the repo was initialized with.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 3,
+ "x": 9,
+ "y": 9
+ },
+ "id": 27,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/init_pool_size/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_init_pool_size{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Pool Size",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The timeout duration that the Repo was initialized with.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 3,
+ "x": 12,
+ "y": 9
+ },
+ "id": 28,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/init_timeout_duration/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_ecto_repo_init_timeout_duration{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Timeout Duration",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 13
+ },
+ "id": 2,
+ "panels": [],
+ "title": "Query Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average total time to execute and decode a database query.",
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 14
+ },
+ "hiddenSeries": false,
+ "id": 32,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_ecto_repo_query_total_time_milliseconds_sum{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by(command) / sum(irate(pinchflat_prom_ex_ecto_repo_query_total_time_milliseconds_count{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by(command)",
+ "interval": "",
+ "legendFormat": "{{ command }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Total Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Response Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the total time spread across all Repo query executions (regardless of query type).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 14
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 6,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_bucket{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average query execution time per Ecto command.",
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 27
+ },
+ "hiddenSeries": false,
+ "id": 11,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_sum{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by(command) / sum(irate(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_count{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by(command)",
+ "interval": "",
+ "legendFormat": "{{ command }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Query Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Response Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the request time spread across all Repo query executions (regardless of query type).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 27
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 33,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_bucket{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Query Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of operations taking place on each data source.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 40
+ },
+ "hiddenSeries": false,
+ "id": 13,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_ecto_repo_query_execution_time_milliseconds_count{instance=\"$instance\", job=\"$job\", repo=\"$repo\"}[$interval])) by(source)",
+ "interval": "",
+ "legendFormat": "{{ source }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Operations Per Source",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "transformations": [],
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Source Operations",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the number of results returned from the database (summed up across all operations and sources).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 40
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 12,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_ecto_repo_query_results_returned_bucket{job=\"$job\", instance=\"$instance\", repo=\"$repo\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Results Returned",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "locale",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "Ecto", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_ecto_repo_init_status_info, repo)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Ecto Repo",
+ "multi": false,
+ "name": "repo",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_ecto_repo_init_status_info, repo)",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": false,
+ "text": "30s",
+ "value": "30s"
+ },
+ "hide": 0,
+ "label": "Interval",
+ "name": "interval",
+ "options": [
+ {
+ "selected": false,
+ "text": "15s",
+ "value": "15s"
+ },
+ {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "15m",
+ "value": "15m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "15s, 30s, 1m, 5m, 15m, 30m, 1h",
+ "queryValue": "",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx Ecto Dashboard",
+ "uid": "449823C137E6C016E4480ADDA42E94EE",
+ "version": 1
+}
diff --git a/priv/grafana/oban.json b/priv/grafana/oban.json
new file mode 100644
index 0000000..bf6c843
--- /dev/null
+++ b/priv/grafana/oban.json
@@ -0,0 +1,2866 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx Oban plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/oban.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Oban Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.Oban.html"
+ }
+ ],
+ "panels": [
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 19,
+ "panels": [],
+ "title": "Overview",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The configuration of the selected Oban instance",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null,
+ "displayMode": "auto"
+ },
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "ms"
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Config Setting"
+ },
+ "properties": [
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "from": "",
+ "id": 1,
+ "text": "Cooldown",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_oban_init_dispatch_cooldown_milliseconds"
+ },
+ {
+ "from": "",
+ "id": 2,
+ "text": "Global Poll Interval",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_oban_init_poll_interval_milliseconds"
+ },
+ {
+ "from": "",
+ "id": 3,
+ "text": "Shutdown Grace Period",
+ "to": "",
+ "type": 1,
+ "value": "pinchflat_prom_ex_oban_init_shutdown_grace_period_milliseconds"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 5,
+ "x": 0,
+ "y": 1
+ },
+ "id": 50,
+ "options": {
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "{__name__=~\"pinchflat_prom_ex_oban_init_shutdown_grace_period_milliseconds|pinchflat_prom_ex_oban_init_poll_interval_milliseconds|pinchflat_prom_ex_oban_init_dispatch_cooldown_milliseconds\", job=\"$job\", instance=\"$instance\", name=\"$oban\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Oban Time Limit Settings",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "__name__": false,
+ "instance": true,
+ "job": true,
+ "name": true
+ },
+ "indexByName": {},
+ "renameByName": {
+ "Value": "Time Value",
+ "__name__": "Config Setting"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of jobs in each queue that are in the available state.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "max": 500,
+ "noValue": "NA",
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 400
+ },
+ {
+ "color": "red",
+ "value": 500
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 12,
+ "w": 7,
+ "x": 5,
+ "y": 1
+ },
+ "id": 60,
+ "options": {
+ "displayMode": "lcd",
+ "orientation": "horizontal",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "showUnfilled": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_queue_length_count{job=\"$job\", instance=\"$instance\", name=\"$oban\", state=\"available\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Available Jobs in Queue",
+ "type": "bargauge"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The configuration of the selected Oban instance",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null,
+ "displayMode": "auto"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 12,
+ "x": 12,
+ "y": 1
+ },
+ "id": 49,
+ "options": {
+ "showHeader": true,
+ "sortBy": []
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "{__name__=\"pinchflat_prom_ex_oban_init_status_info\", job=\"$job\", instance=\"$instance\", name=\"$oban\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Oban Configuration",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "Value": true,
+ "__name__": true,
+ "instance": true,
+ "job": true
+ },
+ "indexByName": {},
+ "renameByName": {
+ "name": "Oban Instance",
+ "node": "Node",
+ "plugins": "Configured Plugins",
+ "prefix": "Postgres Schema",
+ "queues": "Configured Queues",
+ "repo": "Ecto Repo"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The configured concurrency limits for each of the queues under the Oban supervisor.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {
+ "align": null
+ },
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 5,
+ "x": 0,
+ "y": 7
+ },
+ "id": 36,
+ "options": {
+ "showHeader": true
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_init_queue_concurrency_limit{instance=\"$instance\", job=\"$job\", name=\"$oban\"}",
+ "format": "table",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Queue Concurrency Limits",
+ "transformations": [
+ {
+ "id": "organize",
+ "options": {
+ "excludeByName": {
+ "Time": true,
+ "__name__": true,
+ "instance": true,
+ "job": true,
+ "name": true
+ },
+ "indexByName": {},
+ "renameByName": {
+ "Value": "Concurrent Workers",
+ "queue": "Queue"
+ }
+ }
+ }
+ ],
+ "type": "table"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of jobs processed across all queues and states.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 12,
+ "y": 7
+ },
+ "id": 45,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "round(sum(increase(pinchflat_prom_ex_oban_job_processing_duration_milliseconds_count{instance=\"$instance\", job=\"$job\", name=\"$oban\"}[$interval])))",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A",
+ "format": "table"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Processed Jobs (by $interval interval)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of jobs that resulted in an error across all queues.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 16,
+ "y": 7
+ },
+ "id": 46,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "round(sum(increase(pinchflat_prom_ex_oban_job_exception_duration_milliseconds_count{instance=\"$instance\", job=\"$job\", name=\"$oban\"}[$interval])))",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A",
+ "format": "table"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Job Errors (by $interval interval)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of jobs that have been enqueued.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 4,
+ "x": 20,
+ "y": 7
+ },
+ "id": 47,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "round(sum(increase(pinchflat_prom_ex_oban_producer_dispatched_count_count{instance=\"$instance\", job=\"$job\", name=\"$oban\"}[$interval])))",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A",
+ "format": "table"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Jobs Enqueued (by $interval interval)",
+ "type": "stat"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 13
+ },
+ "id": 2,
+ "panels": [],
+ "title": "Successful Job Processing Details",
+ "type": "row"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to process jobs across all queues.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 14
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 6,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_job_processing_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Job Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing how long jobs are waiting in queue for processing.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 14
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 12,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_job_queue_time_milliseconds_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Job Queue Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to process jobs per queue. This will include jobs that ended with a successful, snoozed, or discarded state.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 27
+ },
+ "hiddenSeries": false,
+ "id": 11,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_processing_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_processing_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ state }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Execution Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time a job waited in queue for processing.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 27
+ },
+ "hiddenSeries": false,
+ "id": 44,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_queue_time_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_queue_time_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ state }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Queue Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "In Queue Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average number of attempts it took for jobs to finish per queue. This will include jobs that ended with a successful, snoozed, or discarded state.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 40
+ },
+ "hiddenSeries": false,
+ "id": 51,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_complete_attempts_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_complete_attempts_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ state }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Attempts",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Job Attempts",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 53
+ },
+ "id": 54,
+ "panels": [],
+ "title": "Job Error Details",
+ "type": "row"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to process jobs across all queues that resulted in an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 54
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 55,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_job_exception_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Job Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing how long jobs are waiting in queue for processing that resulted in an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 54
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 56,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_job_exception_queue_time_milliseconds_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Job Queue Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to process jobs per queue that resulted in an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 67
+ },
+ "hiddenSeries": false,
+ "id": 57,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_exception_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ error }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Execution Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time a job waited in queue for processing prior to resulting in an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 67
+ },
+ "hiddenSeries": false,
+ "id": 58,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_exception_queue_time_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_exception_queue_time_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ error }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Queue Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "In Queue Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average number of attempts made per queue before jobs resulted in an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 80
+ },
+ "hiddenSeries": false,
+ "id": 52,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": false,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_job_exception_attempts_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_job_exception_attempts_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "({{ state }}) {{ name }} :: {{ worker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Job Failure Attempts",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Job Attempts",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 93
+ },
+ "id": 4,
+ "panels": [],
+ "title": "Queue Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of jobs marked as available in each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 94
+ },
+ "hiddenSeries": false,
+ "id": 15,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_queue_length_count{job=\"$job\", instance=\"$instance\", name=\"$oban\", state=\"available\"}",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Available Jobs per Queue",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of jobs marked as completed in each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 94
+ },
+ "hiddenSeries": false,
+ "id": 63,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_queue_length_count{job=\"$job\", instance=\"$instance\", name=\"$oban\", state=\"completed\"}",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Completed Jobs per Queue",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of jobs marked as executing in each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 107
+ },
+ "hiddenSeries": false,
+ "id": 62,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_queue_length_count{job=\"$job\", instance=\"$instance\", name=\"$oban\", state=\"executing\"}",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Executing Jobs per Queue",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of jobs marked as retryable in each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 107
+ },
+ "hiddenSeries": false,
+ "id": 61,
+ "legend": {
+ "alignAsTable": false,
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_queue_length_count{job=\"$job\", instance=\"$instance\", name=\"$oban\", state=\"retryable\"}",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Retryable Jobs per Queue",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 120
+ },
+ "id": 30,
+ "panels": [],
+ "title": "Producer Details",
+ "type": "row"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing how long it took for jobs to be dispatched.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 121
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 73,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_producer_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Producer Dispatch Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the number of jobs that were dispatched.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 121
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 74,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_oban_producer_dispatched_count_bucket{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Producer Dispatch Count",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "locale",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average amount of time it took to dispatch jobs to each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 134
+ },
+ "hiddenSeries": false,
+ "id": 67,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_producer_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_producer_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Dispatch Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average number of jobs dispatched to each queue.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 134
+ },
+ "hiddenSeries": false,
+ "id": 75,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_producer_dispatched_count_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_producer_dispatched_count_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Dispatch Count",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average amount of time it took to encounter an error.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 147
+ },
+ "hiddenSeries": false,
+ "id": 76,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_oban_producer_exception_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval]) / irate(pinchflat_prom_ex_oban_producer_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ queue }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Exception Duration",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Number of Jobs",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 160
+ },
+ "id": 65,
+ "panels": [],
+ "title": "Circuit Breaker Details",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "This is the total number of trip events that have been encountered by the selected application and Oban instances.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "noValue": "0",
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 0,
+ "y": 161
+ },
+ "id": 78,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "increase(pinchflat_prom_ex_oban_circuit_trip_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[1h])",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Circuit Trip Events (last 1 hour)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "This is the total number of trip events that have been encountered by the selected application and Oban instances.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "noValue": "0",
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 6,
+ "y": 161
+ },
+ "id": 80,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_circuit_trip_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Circuit Trip Events",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "This is the total number of open events that have been encountered by the selected application and Oban instances.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "noValue": "0",
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 161
+ },
+ "id": 81,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "increase(pinchflat_prom_ex_oban_circuit_open_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[1h])",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Circuit Open Events (last 1 hour)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "This is the total number of open events that have been encountered by the selected application and Oban instances.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "noValue": "0",
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 80
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 161
+ },
+ "id": 79,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_oban_circuit_open_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Circuit Open Events",
+ "type": "stat"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of circuit breaker trip events that occurred along with what component tripped the breaker.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 166
+ },
+ "hiddenSeries": false,
+ "id": 70,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "increase(pinchflat_prom_ex_oban_circuit_trip_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ circuit_breaker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Circuit Breaker Trip Events",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Circuit Trips",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The number of circuit breaker open events that occurred along with what component tripped the breaker.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 166
+ },
+ "hiddenSeries": false,
+ "id": 72,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "increase(pinchflat_prom_ex_oban_circuit_open_total{job=\"$job\", instance=\"$instance\", name=\"$oban\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ circuit_breaker }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Circuit Breaker Open Events",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Number of Circuit Opens",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "Oban", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "elixir_app",
+ "value": "elixir_app"
+ },
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "current": {
+ "selected": false,
+ "text": "elixir_app_one:4000",
+ "value": "elixir_app_one:4000"
+ },
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_oban_init_status_info, name)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Oban Instance",
+ "multi": false,
+ "name": "oban",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_oban_init_status_info, name)",
+ "refresh": 1,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ "hide": 0,
+ "label": "Interval",
+ "name": "interval",
+ "options": [
+ {
+ "selected": false,
+ "text": "15s",
+ "value": "15s"
+ },
+ {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "15m",
+ "value": "15m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "15s, 30s, 1m, 5m, 15m, 30m, 1h",
+ "queryValue": "",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx Oban Dashboard",
+ "uid": "AF9058A0496CA7FF8D4FA747EEDC7AF3",
+ "version": 1
+}
diff --git a/priv/grafana/phoenix.json b/priv/grafana/phoenix.json
new file mode 100644
index 0000000..95dfa63
--- /dev/null
+++ b/priv/grafana/phoenix.json
@@ -0,0 +1,1978 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx Phoenix plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/phoenix.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Phoenix Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.Phoenix.html"
+ }
+ ],
+ "panels": [
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 19,
+ "panels": [],
+ "title": "Overview",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The Phoenix Endpoint module currently active for config metrics.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 8,
+ "x": 0,
+ "y": 1
+ },
+ "id": 32,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^endpoint$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_phoenix_endpoint_port_info{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Server Endpoint",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The port that the server is listening on.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "none"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 8,
+ "x": 8,
+ "y": 1
+ },
+ "id": 30,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["last"],
+ "fields": "/^port$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "pinchflat_prom_ex_phoenix_endpoint_port_info{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}",
+ "instant": true,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Server Port",
+ "transformations": [
+ {
+ "id": "labelsToFields",
+ "options": {}
+ }
+ ],
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The URL that the server is configured to be accessed from.",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 4,
+ "w": 8,
+ "x": 16,
+ "y": 1
+ },
+ "id": 31,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "none",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "/^url$/",
+ "values": false
+ },
+ "text": {},
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "pinchflat_prom_ex_phoenix_endpoint_url_info{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}",
+ "format": "table",
+ "interval": "",
+ "legendFormat": "",
+ "queryType": "randomWalk",
+ "refId": "A"
+ }
+ ],
+ "title": "Server URL",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The Apdex score of the app based on a satisfactory response time of 500ms and a tolerable response time of 1000ms. This only takes into account how long Phoenix has been handling the request and only requests that resulted in a 2xx status code. The score is based on the last 24 hours of requests.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "noValue": "No data",
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 0
+ },
+ {
+ "color": "yellow",
+ "value": 70
+ },
+ {
+ "color": "green",
+ "value": 90
+ }
+ ]
+ },
+ "unit": "percent"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 0,
+ "y": 5
+ },
+ "id": 21,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "center",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(\n (\n sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"500\", status=~\"2..\"}[24h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"1000\", status=~\"2..\"}[24h])) - sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"500\", status=~\"2..\"}[24h]))) / 2\n ) \n / \n sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", status=~\"2..\"}[24h]))\n) * 100",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Apdex Score (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of responses that resulted in 400s or 500s over the past 24 hours.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 1
+ },
+ {
+ "color": "red",
+ "value": 5
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 6,
+ "y": 5
+ },
+ "id": 22,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\", status=~\"4..|5..\"}[24h])) / sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\"}[24h])) OR on() vector(0)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Error Percentage (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of data transferred by Phoenix in a 24 hour rolling window.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "decbytes"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 12,
+ "y": 5
+ },
+ "id": 24,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_http_response_size_bytes_sum{job=\"$job\", instance=\"$instance\"}[24h]))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Data Transferred (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of requests received by Phoenix in a 24 hour rolling window.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 18,
+ "y": 5
+ },
+ "id": 23,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "round(sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\"}[24h])))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Requests Received (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The Apdex score of the app based on a satisfactory response time of 500ms and a tolerable response time of 1000ms. This only takes into account how long Phoenix has been handling the request and only requests that resulted in a 2xx status code. The score is based on the previous hour of requests.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "noValue": "No data",
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "red",
+ "value": 0
+ },
+ {
+ "color": "yellow",
+ "value": 70
+ },
+ {
+ "color": "green",
+ "value": 90
+ }
+ ]
+ },
+ "unit": "percent"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 0,
+ "y": 11
+ },
+ "id": 25,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(\n (\n sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"500\", status=~\"2..\"}[1h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"1000\", status=~\"2..\"}[1h])) - sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", le=\"500\", status=~\"2..\"}[1h]))) / 2\n ) \n / \n sum(increase(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", status=~\"2..\"}[1h]))\n) * 100",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Apdex Score (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of responses that resulted in 400s or 500s over the past hour.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "noValue": "No data",
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 1
+ },
+ {
+ "color": "red",
+ "value": 5
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 6,
+ "y": 11
+ },
+ "id": 26,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\", status=~\"4..|5..\"}[1h])) / sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\"}[1h])) OR on() vector(0)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Error Percentage (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of data transferred by Phoenix in the past hour.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "decbytes"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 12,
+ "y": 11
+ },
+ "id": 27,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_http_response_size_bytes_sum{job=\"$job\", instance=\"$instance\"}[1h]))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Data Transferred (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of requests received by Phoenix in the past hour.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 6,
+ "x": 18,
+ "y": 11
+ },
+ "id": 28,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "round(sum(increase(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\"}[1h])))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total Requests Received (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 17
+ },
+ "id": 2,
+ "panels": [],
+ "title": "HTTP Details",
+ "type": "row"
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the request time spread across all requests (regardless of path).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 18
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 6,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Request Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the response payload size spread across all requests (regardless of path).",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 18
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 12,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_response_size_bytes_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Response Payload Size",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "decbytes",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average request time per path per status code per HTTP method.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 31
+ },
+ "hiddenSeries": false,
+ "id": 11,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(path, status) / sum(irate(pinchflat_prom_ex_phoenix_http_request_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(path, status)",
+ "interval": "",
+ "legendFormat": "{{ method }} {{ path }} :: {{ status }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average HTTP Request Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Response Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average response size per path per status code per HTTP method.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 31
+ },
+ "hiddenSeries": false,
+ "id": 13,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_response_size_bytes_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(path, status) / sum(irate(pinchflat_prom_ex_phoenix_http_response_size_bytes_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(path, status)",
+ "interval": "",
+ "legendFormat": "{{ method }} {{ path }} :: {{ status }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average HTTP Response Size",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "decbytes",
+ "label": "Response Size",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "Shows the number of requests coming into certain paths and the resulting response codes.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 44
+ },
+ "hiddenSeries": false,
+ "id": 8,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_phoenix_http_requests_total{job=\"$job\", instance=\"$instance\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ method }} {{ path }} :: {{ status }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Path Requests",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "locale",
+ "label": "Requests",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The aggregate response status of all the requests.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 44
+ },
+ "hiddenSeries": false,
+ "id": 10,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_requests_total{status=~\"2..\", job=\"$job\", instance=\"$instance\"}[$interval]))",
+ "interval": "",
+ "legendFormat": "2xx",
+ "refId": "A"
+ },
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_requests_total{status=~\"4..\", job=\"$job\", instance=\"$instance\"}[$interval]))",
+ "interval": "",
+ "legendFormat": "4xx",
+ "refId": "B"
+ },
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_http_requests_total{status=~\"5..\", job=\"$job\", instance=\"$instance\"}[$interval]))",
+ "interval": "",
+ "legendFormat": "5xx",
+ "refId": "C"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Response Status Codes",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Requests",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 57
+ },
+ "id": 4,
+ "panels": [],
+ "title": "Channel Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "How many channel joins have occurred over time.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 58
+ },
+ "hiddenSeries": false,
+ "id": 15,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_phoenix_channel_joined_total{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ transport }} :: {{ result }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Channel Join Events",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Channel joins",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the request time spread across all channel events.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 58
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 16,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_channel_handled_in_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Channel Message Handle Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average response time for a channel message.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 71
+ },
+ "hiddenSeries": false,
+ "id": 17,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_phoenix_channel_handled_in_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval]) / irate(pinchflat_prom_ex_phoenix_channel_handled_in_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ transport }} :: {{ result }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Channel Response Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Response Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 84
+ },
+ "id": 34,
+ "panels": [],
+ "title": "Socket Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "How many socket connections have occurred over time.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 85
+ },
+ "hiddenSeries": false,
+ "id": 35,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_phoenix_socket_connected_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ transport }} :: {{ result }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Socket Connection Events",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Channel joins",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the request time spread across all socket connections.",
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 85
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 36,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "exemplar": true,
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_socket_connected_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Socket Connection Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it connects to establish a socket connection.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 98
+ },
+ "hiddenSeries": false,
+ "id": 37,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "irate(pinchflat_prom_ex_phoenix_socket_connected_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval]) / irate(pinchflat_prom_ex_phoenix_socket_connected_duration_milliseconds_count{job=\"$job\", instance=\"$instance\", endpoint=\"$endpoint\"}[$interval])",
+ "interval": "",
+ "legendFormat": "{{ transport }} :: {{ result }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Socket Connection Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "Response Time",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "Phoenix", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_phoenix_endpoint_port_info, endpoint)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Phoenix Endpoint",
+ "multi": false,
+ "name": "endpoint",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_phoenix_endpoint_port_info, endpoint)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": false,
+ "text": "30s",
+ "value": "30s"
+ },
+ "hide": 0,
+ "label": "Interval",
+ "name": "interval",
+ "options": [
+ {
+ "selected": false,
+ "text": "15s",
+ "value": "15s"
+ },
+ {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "15m",
+ "value": "15m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "15s, 30s, 1m, 5m, 15m, 30m, 1h",
+ "queryValue": "",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx Phoenix Dashboard",
+ "uid": "970297EC2ACFCF6777A4D3444B63C036",
+ "version": 1
+}
diff --git a/priv/grafana/phoenix_live_view.json b/priv/grafana/phoenix_live_view.json
new file mode 100644
index 0000000..edef6f7
--- /dev/null
+++ b/priv/grafana/phoenix_live_view.json
@@ -0,0 +1,1380 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#73BF69",
+ "limit": 100,
+ "name": "PromEx service start",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "start"],
+ "type": "tags"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "#FF9830",
+ "limit": 100,
+ "name": "PromEx service stop",
+ "showIn": 0,
+ "tags": ["prom_ex", "pinchflat", "stop"],
+ "type": "tags"
+ }
+ ]
+ },
+ "description": "All the data that is presented here is captured by the PromEx Phoenix LiveView plugin (https://github.com/akoutmos/prom_ex/blob/master/lib/prom_ex/plugins/phoenix_live_view.ex)",
+ "editable": false,
+ "gnetId": null,
+ "graphTooltip": 1,
+ "id": null,
+ "links": [
+ {
+ "asDropdown": false,
+ "icon": "bolt",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "Sponsor PromEx",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://github.com/sponsors/akoutmos"
+ },
+ {
+ "asDropdown": false,
+ "icon": "doc",
+ "includeVars": false,
+ "keepTime": false,
+ "tags": [],
+ "targetBlank": true,
+ "title": "PhoenixLiveView Plugin Docs",
+ "tooltip": "",
+ "type": "link",
+ "url": "https://hexdocs.pm/prom_ex/PromEx.Plugins.PhoenixLiveView.html"
+ }
+ ],
+ "panels": [
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 19,
+ "panels": [],
+ "title": "Overview",
+ "type": "row"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of mount callbacks that successfully executed.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "red",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 80
+ },
+ {
+ "color": "green",
+ "value": 95
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 0,
+ "y": 1
+ },
+ "id": 22,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) / \n(\n sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0))\n)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Mount Callback Success Rate (last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of LiveView mounts that have occurred in the last 24 hours.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 6,
+ "y": 1
+ },
+ "id": 23,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0)) + (sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total LiveView Mounts (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of handle_event callbacks that successfully executed.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "red",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 80
+ },
+ {
+ "color": "green",
+ "value": 95
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 1
+ },
+ "id": 35,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) / \n(\n sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0))\n)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Handle Event Callback Success Rate (last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of LiveView handle_events that have occurred in the last 24 hours.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 1
+ },
+ "id": 33,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "/^Value$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0)) + (sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[24h])) or vector(0))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total LiveView Handle Events (Last 24h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of mount callbacks that successfully executed.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "red",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 80
+ },
+ {
+ "color": "green",
+ "value": 95
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 0,
+ "y": 6
+ },
+ "id": 31,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) / \n(\n sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0))\n)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Mount Callback Success Rate (last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The amount of LiveView mounts that have occurred in the last hour.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 6,
+ "y": 6
+ },
+ "id": 32,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0)) + (sum(increase(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total LiveView Mounts (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "A percentage of handle event callbacks that successfully executed.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 1,
+ "mappings": [],
+ "thresholds": {
+ "mode": "percentage",
+ "steps": [
+ {
+ "color": "red",
+ "value": null
+ },
+ {
+ "color": "#EAB839",
+ "value": 80
+ },
+ {
+ "color": "green",
+ "value": 95
+ }
+ ]
+ },
+ "unit": "percentunit"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 12,
+ "y": 6
+ },
+ "id": 36,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) / \n(\n sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) + \n (sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0))\n)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Handle Event Callback Success Rate (last 1h)",
+ "type": "stat"
+ },
+ {
+ "datasource": "prometheus",
+ "description": "The number of LiveView handle_events that have occurred in the last hour.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {},
+ "decimals": 0,
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green",
+ "value": null
+ }
+ ]
+ },
+ "unit": "locale"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 5,
+ "w": 6,
+ "x": 18,
+ "y": 6
+ },
+ "id": 34,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "reduceOptions": {
+ "calcs": ["lastNotNull"],
+ "fields": "/^Value$/",
+ "values": false
+ },
+ "textMode": "auto"
+ },
+ "pluginVersion": "7.1.3",
+ "targets": [
+ {
+ "expr": "(sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0)) + (sum(increase(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[1h])) or vector(0))",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Total LiveView Handle Events (Last 1h)",
+ "type": "stat"
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 11
+ },
+ "id": 2,
+ "panels": [],
+ "title": "Mount Callback Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to complete the mount callback function.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 12
+ },
+ "hiddenSeries": false,
+ "id": 11,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module) / sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "({{ action }}) {{ module }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to get through the mount callback function when an error was encountered.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 12
+ },
+ "hiddenSeries": false,
+ "id": 29,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module, kind, reason) / sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module, kind, reason)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "({{ action }}) {{ module }} :: ({{kind}} -> {{reason}})",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Exception Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to get through the mount callback.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 25
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 6,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to get through the mount callback when an exception was encountered.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 25
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 30,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_mount_exception_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Exception Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "collapsed": false,
+ "datasource": null,
+ "gridPos": {
+ "h": 1,
+ "w": 24,
+ "x": 0,
+ "y": 38
+ },
+ "id": 38,
+ "panels": [],
+ "title": "Handle Event Callback Details",
+ "type": "row"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to complete the handle_event callback function.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 39
+ },
+ "hiddenSeries": false,
+ "id": 39,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module, event) / sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(action, module, event)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "({{ action }}) {{ module }} :: {{ event }}",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "description": "The average time it took to get through the handle_event callback function when an error was encountered.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 39
+ },
+ "hiddenSeries": false,
+ "id": 40,
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "hideEmpty": false,
+ "hideZero": false,
+ "max": true,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null as zero",
+ "percentage": false,
+ "pluginVersion": "7.1.3",
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_sum{job=\"$job\", instance=\"$instance\"}[$interval])) by(event, action, module, kind, reason) / sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_count{job=\"$job\", instance=\"$instance\"}[$interval])) by(event, action, module, kind, reason)",
+ "instant": false,
+ "interval": "",
+ "legendFormat": "({{ action }}) {{ module }} :: {{event}} ({{kind}} -> {{reason}})",
+ "refId": "A"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "Average Exception Execution Time",
+ "tooltip": {
+ "shared": true,
+ "sort": 2,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ms",
+ "label": "",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": false
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to get through the handle_event callback.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 0,
+ "y": 52
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 41,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ },
+ {
+ "cards": {
+ "cardPadding": null,
+ "cardRound": null
+ },
+ "color": {
+ "cardColor": "#b4ff00",
+ "colorScale": "sqrt",
+ "colorScheme": "interpolateOranges",
+ "exponent": 0.5,
+ "mode": "spectrum"
+ },
+ "dataFormat": "tsbuckets",
+ "datasource": "prometheus",
+ "description": "A heatmap showing the time it took to get through the handle_event callback when an exception was encountered.",
+ "fieldConfig": {
+ "defaults": {
+ "custom": {}
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 13,
+ "w": 12,
+ "x": 12,
+ "y": 52
+ },
+ "heatmap": {},
+ "hideZeroBuckets": true,
+ "highlightCards": true,
+ "id": 42,
+ "legend": {
+ "show": true
+ },
+ "pluginVersion": "7.1.3",
+ "reverseYBuckets": false,
+ "targets": [
+ {
+ "expr": "sum(irate(pinchflat_prom_ex_phoenix_live_view_handle_event_exception_duration_milliseconds_bucket{job=\"$job\", instance=\"$instance\"}[$interval])) by (le)",
+ "format": "heatmap",
+ "hide": false,
+ "interval": "",
+ "legendFormat": "{{ le }}",
+ "refId": "A"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Exception Execution Time",
+ "tooltip": {
+ "show": true,
+ "showHistogram": true
+ },
+ "type": "heatmap",
+ "xAxis": {
+ "show": true
+ },
+ "xBucketNumber": null,
+ "xBucketSize": null,
+ "yAxis": {
+ "decimals": null,
+ "format": "ms",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true,
+ "splitFactor": null
+ },
+ "yBucketBound": "auto",
+ "yBucketNumber": null,
+ "yBucketSize": null
+ }
+ ],
+ "refresh": "5s",
+ "schemaVersion": 26,
+ "style": "dark",
+ "tags": ["PromEx", "Phoenix LiveView", "pinchflat"],
+ "templating": {
+ "list": [
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Prometheus Job",
+ "multi": false,
+ "name": "job",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info, job)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 6,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "allValue": null,
+ "datasource": "prometheus",
+ "definition": "label_values(pinchflat_prom_ex_prom_ex_status_info, instance)",
+ "hide": 0,
+ "includeAll": false,
+ "label": "Application Instance",
+ "multi": false,
+ "name": "instance",
+ "options": [],
+ "query": "label_values(pinchflat_prom_ex_prom_ex_status_info{job=\"$job\"}, instance)",
+ "refresh": 2,
+ "regex": "",
+ "skipUrlSync": false,
+ "sort": 0,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ },
+ {
+ "auto": false,
+ "auto_count": 30,
+ "auto_min": "10s",
+ "current": {
+ "selected": false,
+ "text": "30s",
+ "value": "30s"
+ },
+ "hide": 0,
+ "label": "Interval",
+ "name": "interval",
+ "options": [
+ {
+ "selected": false,
+ "text": "15s",
+ "value": "15s"
+ },
+ {
+ "selected": true,
+ "text": "30s",
+ "value": "30s"
+ },
+ {
+ "selected": false,
+ "text": "1m",
+ "value": "1m"
+ },
+ {
+ "selected": false,
+ "text": "5m",
+ "value": "5m"
+ },
+ {
+ "selected": false,
+ "text": "15m",
+ "value": "15m"
+ },
+ {
+ "selected": false,
+ "text": "30m",
+ "value": "30m"
+ },
+ {
+ "selected": false,
+ "text": "1h",
+ "value": "1h"
+ }
+ ],
+ "query": "15s, 30s, 1m, 5m, 15m, 30m, 1h",
+ "queryValue": "",
+ "refresh": 2,
+ "skipUrlSync": false,
+ "type": "interval"
+ }
+ ]
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
+ },
+ "timezone": "",
+ "title": "Pinchflat - PromEx PhoenixLiveView Dashboard",
+ "uid": "57C5565313BA3CBE98CC2A2F8C18248F",
+ "version": 1
+}
diff --git a/priv/repo/erd.png b/priv/repo/erd.png
index 365c7b4..4f06954 100644
Binary files a/priv/repo/erd.png and b/priv/repo/erd.png differ
diff --git a/priv/repo/migrations/20240715212133_add_playlist_index_to_media_items.exs b/priv/repo/migrations/20240715212133_add_playlist_index_to_media_items.exs
new file mode 100644
index 0000000..e629efd
--- /dev/null
+++ b/priv/repo/migrations/20240715212133_add_playlist_index_to_media_items.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddPlaylistIndexToMediaItems do
+ use Ecto.Migration
+
+ def change do
+ alter table(:media_items) do
+ add :playlist_index, :integer, null: false, default: 0
+ end
+ end
+end
diff --git a/priv/repo/migrations/20240722183656_add_marked_for_deletion_at_to_sources_and_profiles.exs b/priv/repo/migrations/20240722183656_add_marked_for_deletion_at_to_sources_and_profiles.exs
new file mode 100644
index 0000000..81a99e0
--- /dev/null
+++ b/priv/repo/migrations/20240722183656_add_marked_for_deletion_at_to_sources_and_profiles.exs
@@ -0,0 +1,13 @@
+defmodule Pinchflat.Repo.Migrations.AddMarkedForDeletionAtToSources do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sources) do
+ add :marked_for_deletion_at, :utc_datetime
+ end
+
+ alter table(:media_profiles) do
+ add :marked_for_deletion_at, :utc_datetime
+ end
+ end
+end
diff --git a/priv/repo/migrations/20240814154844_add_use_cookies_to_sources.exs b/priv/repo/migrations/20240814154844_add_use_cookies_to_sources.exs
new file mode 100644
index 0000000..c7b0f59
--- /dev/null
+++ b/priv/repo/migrations/20240814154844_add_use_cookies_to_sources.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddUseCookiesToSources do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sources) do
+ add :use_cookies, :boolean, default: true, null: false
+ end
+ end
+end
diff --git a/priv/repo/migrations/20240814193939_add_duration_limits_to_sources.exs b/priv/repo/migrations/20240814193939_add_duration_limits_to_sources.exs
new file mode 100644
index 0000000..75b55f4
--- /dev/null
+++ b/priv/repo/migrations/20240814193939_add_duration_limits_to_sources.exs
@@ -0,0 +1,10 @@
+defmodule Pinchflat.Repo.Migrations.AddDurationLimitsToSources do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sources) do
+ add :min_duration_seconds, :integer
+ add :max_duration_seconds, :integer
+ end
+ end
+end
diff --git a/priv/repo/migrations/20240910173050_add_media_container_to_media_profiles.exs b/priv/repo/migrations/20240910173050_add_media_container_to_media_profiles.exs
new file mode 100644
index 0000000..3ad34d0
--- /dev/null
+++ b/priv/repo/migrations/20240910173050_add_media_container_to_media_profiles.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddMediaContainerToMediaProfiles do
+ use Ecto.Migration
+
+ def change do
+ alter table(:media_profiles) do
+ add :media_container, :string
+ end
+ end
+end
diff --git a/priv/repo/migrations/20241107201850_add_predicted_media_filepath_to_media_items.exs b/priv/repo/migrations/20241107201850_add_predicted_media_filepath_to_media_items.exs
new file mode 100644
index 0000000..630b856
--- /dev/null
+++ b/priv/repo/migrations/20241107201850_add_predicted_media_filepath_to_media_items.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddPredictedMediaFilepathToMediaItems do
+ use Ecto.Migration
+
+ def change do
+ alter table(:media_items) do
+ add :predicted_media_filepath, :string
+ end
+ end
+end
diff --git a/priv/repo/migrations/20241120204407_add_enabled_to_sources.exs b/priv/repo/migrations/20241120204407_add_enabled_to_sources.exs
new file mode 100644
index 0000000..e98150b
--- /dev/null
+++ b/priv/repo/migrations/20241120204407_add_enabled_to_sources.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddEnabledToSources do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sources) do
+ add :enabled, :boolean, default: true, null: false
+ end
+ end
+end
diff --git a/priv/repo/migrations/20241127172054_add_audio_lang_to_media_profiles.exs b/priv/repo/migrations/20241127172054_add_audio_lang_to_media_profiles.exs
new file mode 100644
index 0000000..b19d981
--- /dev/null
+++ b/priv/repo/migrations/20241127172054_add_audio_lang_to_media_profiles.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddAudioLangToMediaProfiles do
+ use Ecto.Migration
+
+ def change do
+ alter table(:media_profiles) do
+ add :audio_track, :string
+ end
+ end
+end
diff --git a/priv/repo/migrations/20241230192618_add_route_token_to_settings.exs b/priv/repo/migrations/20241230192618_add_route_token_to_settings.exs
new file mode 100644
index 0000000..331c7d2
--- /dev/null
+++ b/priv/repo/migrations/20241230192618_add_route_token_to_settings.exs
@@ -0,0 +1,11 @@
+defmodule Pinchflat.Repo.Migrations.AddRouteTokenToSettings do
+ use Ecto.Migration
+
+ def change do
+ alter table(:settings) do
+ add :route_token, :string, null: false, default: "tmp-token"
+ end
+
+ execute "UPDATE settings SET route_token = gen_random_uuid();", "SELECT 1;"
+ end
+end
diff --git a/priv/repo/migrations/20250110231704_add_extractor_sleep_interval_to_settings.exs b/priv/repo/migrations/20250110231704_add_extractor_sleep_interval_to_settings.exs
new file mode 100644
index 0000000..17dd735
--- /dev/null
+++ b/priv/repo/migrations/20250110231704_add_extractor_sleep_interval_to_settings.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddExtractorSleepIntervalToSettings do
+ use Ecto.Migration
+
+ def change do
+ alter table(:settings) do
+ add :extractor_sleep_interval_seconds, :number, null: false, default: 0
+ end
+ end
+end
diff --git a/priv/repo/migrations/20250210201413_add_last_error_to_media_item.exs b/priv/repo/migrations/20250210201413_add_last_error_to_media_item.exs
new file mode 100644
index 0000000..d8723ba
--- /dev/null
+++ b/priv/repo/migrations/20250210201413_add_last_error_to_media_item.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddLastErrorToMediaItem do
+ use Ecto.Migration
+
+ def change do
+ alter table(:media_items) do
+ add :last_error, :string
+ end
+ end
+end
diff --git a/priv/repo/migrations/20250304185528_add_cookie_behaviour_to_sources.exs b/priv/repo/migrations/20250304185528_add_cookie_behaviour_to_sources.exs
new file mode 100644
index 0000000..0eb9eb0
--- /dev/null
+++ b/priv/repo/migrations/20250304185528_add_cookie_behaviour_to_sources.exs
@@ -0,0 +1,18 @@
+defmodule Pinchflat.Repo.Migrations.AddCookieBehaviourToSources do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sources) do
+ add :cookie_behaviour, :string, null: false, default: "disabled"
+ end
+
+ execute(
+ "UPDATE sources SET cookie_behaviour = 'all_operations' WHERE use_cookies = TRUE",
+ "UPDATE sources SET use_cookies = TRUE WHERE cookie_behaviour = 'all_operations'"
+ )
+
+ alter table(:sources) do
+ remove :use_cookies, :boolean, null: false, default: false
+ end
+ end
+end
diff --git a/priv/repo/migrations/20250311222451_add_rate_limit_speed_to_settings.exs b/priv/repo/migrations/20250311222451_add_rate_limit_speed_to_settings.exs
new file mode 100644
index 0000000..fd7cc82
--- /dev/null
+++ b/priv/repo/migrations/20250311222451_add_rate_limit_speed_to_settings.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddRateLimitSpeedToSettings do
+ use Ecto.Migration
+
+ def change do
+ alter table(:settings) do
+ add :download_throughput_limit, :string
+ end
+ end
+end
diff --git a/priv/repo/migrations/20250317213740_add_restrict_filenames_to_settings.exs b/priv/repo/migrations/20250317213740_add_restrict_filenames_to_settings.exs
new file mode 100644
index 0000000..6a6591d
--- /dev/null
+++ b/priv/repo/migrations/20250317213740_add_restrict_filenames_to_settings.exs
@@ -0,0 +1,9 @@
+defmodule Pinchflat.Repo.Migrations.AddRestrictFilenamesToSettings do
+ use Ecto.Migration
+
+ def change do
+ alter table(:settings) do
+ add :restrict_filenames, :boolean, default: false
+ end
+ end
+end
diff --git a/rel/overlays/bin/docker_start b/rel/overlays/bin/docker_start
index 4f2d132..b60c6d3 100755
--- a/rel/overlays/bin/docker_start
+++ b/rel/overlays/bin/docker_start
@@ -6,6 +6,9 @@ if [ $? -ne 0 ]; then
exit 1
fi
+echo "Setting umask to ${UMASK}"
+umask ${UMASK}
+
/app/bin/migrate
cd -P -- "$(dirname -- "$0")"
diff --git a/test/pinchflat/boot/post_boot_startup_tasks_test.exs b/test/pinchflat/boot/post_boot_startup_tasks_test.exs
new file mode 100644
index 0000000..43a7086
--- /dev/null
+++ b/test/pinchflat/boot/post_boot_startup_tasks_test.exs
@@ -0,0 +1,16 @@
+defmodule Pinchflat.Boot.PostBootStartupTasksTest do
+ use Pinchflat.DataCase
+
+ alias Pinchflat.YtDlp.UpdateWorker
+ alias Pinchflat.Boot.PostBootStartupTasks
+
+ describe "update_yt_dlp" do
+ test "enqueues an update job" do
+ assert [] = all_enqueued(worker: UpdateWorker)
+
+ PostBootStartupTasks.init(%{})
+
+ assert [%Oban.Job{}] = all_enqueued(worker: UpdateWorker)
+ end
+ end
+end
diff --git a/test/pinchflat/boot/pre_job_startup_tasks_test.exs b/test/pinchflat/boot/pre_job_startup_tasks_test.exs
index a940a4d..ceea6d4 100644
--- a/test/pinchflat/boot/pre_job_startup_tasks_test.exs
+++ b/test/pinchflat/boot/pre_job_startup_tasks_test.exs
@@ -9,6 +9,7 @@ defmodule Pinchflat.Boot.PreJobStartupTasksTest do
setup do
stub(YtDlpRunnerMock, :version, fn -> {:ok, "1"} end)
stub(AppriseRunnerMock, :version, fn -> {:ok, "2"} end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "3", 0} end)
:ok
end
@@ -112,4 +113,16 @@ defmodule Pinchflat.Boot.PreJobStartupTasksTest do
assert Settings.get!(:apprise_version)
end
end
+
+ describe "run_app_init_script" do
+ test "calls the app_init user script runner" do
+ expect(UserScriptRunnerMock, :run, fn :app_init, data ->
+ assert data == %{}
+
+ {:ok, "", 0}
+ end)
+
+ PreJobStartupTasks.init(%{})
+ end
+ end
end
diff --git a/test/pinchflat/downloading/download_option_builder_test.exs b/test/pinchflat/downloading/download_option_builder_test.exs
index d31c6c6..f625654 100644
--- a/test/pinchflat/downloading/download_option_builder_test.exs
+++ b/test/pinchflat/downloading/download_option_builder_test.exs
@@ -6,7 +6,6 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
alias Pinchflat.Sources
alias Pinchflat.Profiles
- alias Pinchflat.Settings
alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.Downloading.DownloadOptionBuilder
@@ -93,15 +92,23 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
end
test "includes :write_auto_subs option when specified", %{media_item: media_item} do
- media_item = update_media_profile_attribute(media_item, %{download_subs: true, download_auto_subs: true})
+ media_item_1 = update_media_profile_attribute(media_item, %{download_subs: true, download_auto_subs: true})
+ media_item_2 = update_media_profile_attribute(media_item, %{embed_subs: true, download_auto_subs: true})
- assert {:ok, res} = DownloadOptionBuilder.build(media_item)
+ assert {:ok, res_1} = DownloadOptionBuilder.build(media_item_1)
+ assert {:ok, res_2} = DownloadOptionBuilder.build(media_item_2)
- assert :write_auto_subs in res
+ assert :write_auto_subs in res_1
+ assert :write_auto_subs in res_2
end
- test "doesn't include :write_auto_subs option when download_subs is false", %{media_item: media_item} do
- media_item = update_media_profile_attribute(media_item, %{download_subs: false, download_auto_subs: true})
+ test "doesn't include :write_auto_subs option when download_subs and embed_subs is false", %{media_item: media_item} do
+ media_item =
+ update_media_profile_attribute(media_item, %{
+ download_subs: false,
+ embed_subs: false,
+ download_auto_subs: true
+ })
assert {:ok, res} = DownloadOptionBuilder.build(media_item)
@@ -244,22 +251,15 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
end
end
- describe "build/1 when testing quality options" do
- test "includes quality options" do
- resolutions = ["360", "480", "720", "1080", "2160", "4320"]
+ describe "build/1 when testing media quality and format options" do
+ # There are more tests inside QualityOptionBuilderTest
+ # This is essentially just testing that we implement that module correctly
- Enum.each(resolutions, fn resolution ->
- resolution_atom = String.to_existing_atom(resolution <> "p")
+ test "includes video options for video profiles", %{media_item: media_item} do
+ assert {:ok, res} = DownloadOptionBuilder.build(media_item)
- media_profile = media_profile_fixture(%{preferred_resolution: resolution_atom})
- source = source_fixture(%{media_profile_id: media_profile.id})
- media_item = Repo.preload(media_item_fixture(source_id: source.id), source: :media_profile)
-
- assert {:ok, res} = DownloadOptionBuilder.build(media_item)
-
- assert {:format_sort, "res:#{resolution},+codec:avc:m4a"} in res
- assert {:remux_video, "mp4"} in res
- end)
+ assert {:format_sort, "res:1080,+codec:avc:m4a"} in res
+ assert {:remux_video, "mp4"} in res
end
test "includes quality options for audio only", %{media_item: media_item} do
@@ -272,17 +272,6 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
refute {:remux_video, "mp4"} in res
end
-
- test "includes custom quality options if specified", %{media_item: media_item} do
- Settings.set(video_codec_preference: "av01")
- Settings.set(audio_codec_preference: "aac")
-
- media_item = update_media_profile_attribute(media_item, %{preferred_resolution: :"1080p"})
-
- assert {:ok, res} = DownloadOptionBuilder.build(media_item)
-
- assert {:format_sort, "res:1080,+codec:av01:aac"} in res
- end
end
describe "build/1 when testing sponsorblock options" do
@@ -298,7 +287,19 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
assert {:sponsorblock_remove, "sponsor,intro"} in res
end
- test "does not include :sponsorblock_remove option without categories", %{media_item: media_item} do
+ test "includes :sponsorblock_mark option when specified", %{media_item: media_item} do
+ media_item =
+ update_media_profile_attribute(media_item, %{
+ sponsorblock_behaviour: :mark,
+ sponsorblock_categories: ["sponsor", "intro"]
+ })
+
+ assert {:ok, res} = DownloadOptionBuilder.build(media_item)
+
+ assert {:sponsorblock_mark, "sponsor,intro"} in res
+ end
+
+ test "does not include any sponsorblock option without categories", %{media_item: media_item} do
media_item =
update_media_profile_attribute(media_item, %{
sponsorblock_behaviour: :remove,
@@ -307,9 +308,10 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
assert {:ok, res} = DownloadOptionBuilder.build(media_item)
- refute {:sponsorblock_remove, ""} in res
- refute {:sponsorblock_remove, []} in res
+ refute Keyword.has_key?(res, :sponsorblock_remove)
+ refute Keyword.has_key?(res, :sponsorblock_mark)
refute :sponsorblock_remove in res
+ refute :sponsorblock_mark in res
end
test "does not include any sponsorblock options when disabled", %{media_item: media_item} do
@@ -318,9 +320,10 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
assert {:ok, res} = DownloadOptionBuilder.build(media_item)
- refute {:sponsorblock_remove, ""} in res
- refute {:sponsorblock_remove, []} in res
+ refute Keyword.has_key?(res, :sponsorblock_remove)
+ refute Keyword.has_key?(res, :sponsorblock_mark)
refute :sponsorblock_remove in res
+ refute :sponsorblock_mark in res
end
end
@@ -437,6 +440,22 @@ defmodule Pinchflat.Downloading.DownloadOptionBuilderTest do
end
end
+ describe "build_quality_options_for/1" do
+ test "builds quality options for a media item", %{media_item: media_item} do
+ options = DownloadOptionBuilder.build_quality_options_for(media_item)
+
+ assert {:format_sort, "res:1080,+codec:avc:m4a"} in options
+ assert {:remux_video, "mp4"} in options
+ end
+
+ test "builds quality options for a source", %{media_item: media_item} do
+ options = DownloadOptionBuilder.build_quality_options_for(media_item.source)
+
+ assert {:format_sort, "res:1080,+codec:avc:m4a"} in options
+ assert {:remux_video, "mp4"} in options
+ end
+ end
+
defp update_media_profile_attribute(media_item_with_preloads, attrs) do
media_item_with_preloads.source.media_profile
|> Profiles.change_media_profile(attrs)
diff --git a/test/pinchflat/downloading/downloading_helpers_test.exs b/test/pinchflat/downloading/downloading_helpers_test.exs
index 01b0518..cc2f62a 100644
--- a/test/pinchflat/downloading/downloading_helpers_test.exs
+++ b/test/pinchflat/downloading/downloading_helpers_test.exs
@@ -10,7 +10,7 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
alias Pinchflat.Downloading.MediaDownloadWorker
describe "enqueue_pending_download_tasks/1" do
- test "it enqueues a job for each pending media item" do
+ test "enqueues a job for each pending media item" do
source = source_fixture()
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
@@ -19,7 +19,7 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
end
- test "it does not enqueue a job for media items with a filepath" do
+ test "does not enqueue a job for media items with a filepath" do
source = source_fixture()
_media_item = media_item_fixture(source_id: source.id, media_filepath: "some/filepath.mp4")
@@ -28,7 +28,7 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
refute_enqueued(worker: MediaDownloadWorker)
end
- test "it attaches a task to each enqueued job" do
+ test "attaches a task to each enqueued job" do
source = source_fixture()
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
@@ -39,7 +39,7 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
assert [_] = Tasks.list_tasks_for(media_item)
end
- test "it does not create a job if the source is set to not download" do
+ test "does not create a job if the source is set to not download" do
source = source_fixture(download_media: false)
assert :ok = DownloadingHelpers.enqueue_pending_download_tasks(source)
@@ -47,17 +47,26 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
refute_enqueued(worker: MediaDownloadWorker)
end
- test "it does not attach tasks if the source is set to not download" do
+ test "does not attach tasks if the source is set to not download" do
source = source_fixture(download_media: false)
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
assert :ok = DownloadingHelpers.enqueue_pending_download_tasks(source)
assert [] = Tasks.list_tasks_for(media_item)
end
+
+ test "can pass job options" do
+ source = source_fixture()
+ media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+
+ assert :ok = DownloadingHelpers.enqueue_pending_download_tasks(source, priority: 1)
+
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id}, priority: 1)
+ end
end
describe "dequeue_pending_download_tasks/1" do
- test "it deletes all pending tasks for a source's media items" do
+ test "deletes all pending tasks for a source's media items" do
source = source_fixture()
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
@@ -109,6 +118,14 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
refute_enqueued(worker: MediaDownloadWorker)
end
+
+ test "can pass job options" do
+ media_item = media_item_fixture(media_filepath: nil)
+
+ assert {:ok, _} = DownloadingHelpers.kickoff_download_if_pending(media_item, priority: 1)
+
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id}, priority: 1)
+ end
end
describe "kickoff_redownload_for_existing_media/1" do
@@ -130,9 +147,6 @@ defmodule Pinchflat.Downloading.DownloadingHelpersTest do
_download_prevented =
media_item_fixture(source_id: source.id, media_filepath: "some/filepath.mp4", prevent_download: true)
- _culled =
- media_item_fixture(source_id: source.id, media_filepath: "some/filepath.mp4", culled_at: now())
-
assert [] = DownloadingHelpers.kickoff_redownload_for_existing_media(source)
refute_enqueued(worker: MediaDownloadWorker)
diff --git a/test/pinchflat/downloading/media_download_worker_test.exs b/test/pinchflat/downloading/media_download_worker_test.exs
index 754f838..9b2e0f8 100644
--- a/test/pinchflat/downloading/media_download_worker_test.exs
+++ b/test/pinchflat/downloading/media_download_worker_test.exs
@@ -9,8 +9,13 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
alias Pinchflat.Downloading.MediaDownloadWorker
setup do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ end)
+
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
stub(HTTPClientMock, :get, fn _url, _headers, _opts -> {:ok, ""} end)
media_item =
@@ -41,22 +46,35 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id, "force" => true})
end
- test "can be called with additional job options", %{media_item: media_item} do
- job_opts = [max_attempts: 5]
-
- assert {:ok, _} = MediaDownloadWorker.kickoff_with_task(media_item, %{}, job_opts)
+ test "has a priority of 5 by default", %{media_item: media_item} do
+ assert {:ok, _} = MediaDownloadWorker.kickoff_with_task(media_item)
[job] = all_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
- assert job.max_attempts == 5
+
+ assert job.priority == 5
+ end
+
+ test "priority can be set", %{media_item: media_item} do
+ assert {:ok, _} = MediaDownloadWorker.kickoff_with_task(media_item, %{}, priority: 0)
+
+ [job] = all_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+
+ assert job.priority == 0
end
end
describe "perform/1" do
- test "it saves attributes to the media_item", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
+ setup do
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
end)
+ :ok
+ end
+
+ test "saves attributes to the media_item", %{media_item: media_item} do
assert media_item.media_filepath == nil
perform_job(MediaDownloadWorker, %{id: media_item.id})
media_item = Repo.reload(media_item)
@@ -64,25 +82,24 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
assert media_item.media_filepath != nil
end
- test "it saves the metadata to the media_item", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
- end)
-
+ test "saves the metadata to the media_item", %{media_item: media_item} do
assert media_item.metadata == nil
perform_job(MediaDownloadWorker, %{id: media_item.id})
assert Repo.reload(media_item).metadata != nil
end
- test "it won't double-schedule downloading jobs", %{media_item: media_item} do
+ test "won't double-schedule downloading jobs", %{media_item: media_item} do
Oban.insert(MediaDownloadWorker.new(%{id: media_item.id}))
Oban.insert(MediaDownloadWorker.new(%{id: media_item.id}))
assert [_] = all_enqueued(worker: MediaDownloadWorker)
end
- test "it sets the job to retryable if the download fails", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl -> {:error, "error"} end)
+ test "sets the job to retryable if the download fails", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, "error"}
+ end)
Oban.Testing.with_testing_mode(:inline, fn ->
{:ok, job} = Oban.insert(MediaDownloadWorker.new(%{id: media_item.id}))
@@ -92,8 +109,9 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
end
test "sets the job to retryable if the download failed and was retried", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:error, "Unable to communicate with SponsorBlock", 1}
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, "Unable to communicate with SponsorBlock", 1}
end)
Oban.Testing.with_testing_mode(:inline, fn ->
@@ -104,8 +122,9 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
end
test "does not set the job to retryable if retrying wouldn't fix the issue", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:error, "Something something Video unavailable something something", 1}
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, "Something something Video unavailable something something", 1}
end)
Oban.Testing.with_testing_mode(:inline, fn ->
@@ -115,14 +134,46 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
end)
end
- test "it ensures error are returned in a 2-item tuple", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl -> {:error, "error", 1} end)
+ test "does not set the job to retryable if youtube thinks you're a bot", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, "Sign in to confirm you're not a bot", 1}
+ end)
+
+ Oban.Testing.with_testing_mode(:inline, fn ->
+ {:ok, job} = Oban.insert(MediaDownloadWorker.new(%{id: media_item.id, quality_upgrade?: true}))
+
+ assert job.state == "completed"
+ end)
+ end
+
+ test "does not set the job to retryable if you aren't a member", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, _addl ->
+ {:error, "This video is available to this channel's members on level: foo", 1}
+ end)
+
+ Oban.Testing.with_testing_mode(:inline, fn ->
+ {:ok, job} = Oban.insert(MediaDownloadWorker.new(%{id: media_item.id, quality_upgrade?: true}))
+
+ assert job.state == "completed"
+ end)
+ end
+
+ test "ensures errors are returned in a 2-item tuple", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, "error", 1}
+ end)
assert {:error, :download_failed} = perform_job(MediaDownloadWorker, %{id: media_item.id})
end
- test "it does not download if the source is set to not download", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, 0, fn _url, _opts, _ot, _addl -> :ok end)
+ test "does not download if the source is set to not download", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> :ok end)
Sources.update_source(media_item.source, %{download_media: false})
@@ -130,19 +181,26 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
end
test "does not download if the media item is set to not download", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, 0, fn _url, _opts, _ot, _addl -> :ok end)
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> :ok end)
Media.update_media_item(media_item, %{prevent_download: true})
perform_job(MediaDownloadWorker, %{id: media_item.id})
end
- test "it saves the file's size to the database", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- metadata = render_parsed_metadata(:media_metadata)
- FilesystemUtils.write_p!(metadata["filepath"], "test")
+ test "saves the file's size to the database", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- {:ok, Phoenix.json_library().encode!(metadata)}
+ _url, :download, _opts, _ot, _addl ->
+ metadata = render_parsed_metadata(:media_metadata)
+ FilesystemUtils.write_p!(metadata["filepath"], "test")
+
+ {:ok, Phoenix.json_library().encode!(metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
perform_job(MediaDownloadWorker, %{id: media_item.id})
@@ -152,49 +210,64 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
end
test "does not set redownloaded_at by default", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
- end)
-
perform_job(MediaDownloadWorker, %{id: media_item.id})
media_item = Repo.reload(media_item)
assert media_item.media_redownloaded_at == nil
end
- test "calls the user script runner", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
- end)
-
- expect(UserScriptRunnerMock, :run, fn :media_downloaded, data ->
- assert data.id == media_item.id
-
- :ok
- end)
-
- perform_job(MediaDownloadWorker, %{id: media_item.id})
- end
-
test "does not blow up if the record doesn't exist" do
assert :ok = perform_job(MediaDownloadWorker, %{id: 0})
end
test "sets the no_force_overwrites runner option", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot, _addl ->
- assert :no_force_overwrites in opts
- refute :force_overwrites in opts
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- {:ok, render_metadata(:media_metadata)}
+ _url, :download, opts, _ot, _addl ->
+ assert :no_force_overwrites in opts
+ refute :force_overwrites in opts
+
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
perform_job(MediaDownloadWorker, %{id: media_item.id})
end
+
+ test "does not download if the media item isn't pending download", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> :ok end)
+
+ Media.update_media_item(media_item, %{media_filepath: "foo.mp4"})
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id})
+ end
+ end
+
+ describe "perform/1 when testing non-downloadable media" do
+ test "does not retry the job if the media is currently not downloadable", %{media_item: media_item} do
+ stub(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "is_live"})}
+ end)
+
+ Oban.Testing.with_testing_mode(:inline, fn ->
+ {:ok, job} = Oban.insert(MediaDownloadWorker.new(%{id: media_item.id}))
+
+ assert job.state == "completed"
+ end)
+ end
end
describe "perform/1 when testing forced downloads" do
test "ignores 'prevent_download' if forced", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl -> :ok end)
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
+ end)
Sources.update_source(media_item.source, %{download_media: false})
Media.update_media_item(media_item, %{prevent_download: true})
@@ -202,12 +275,31 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
perform_job(MediaDownloadWorker, %{id: media_item.id, force: true})
end
- test "sets force_overwrites runner option", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot, _addl ->
- assert :force_overwrites in opts
- refute :no_force_overwrites in opts
+ test "ignores whether the media item is pending when forced", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
+ end)
- {:ok, render_metadata(:media_metadata)}
+ Media.update_media_item(media_item, %{media_filepath: "foo.mp4"})
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id, force: true})
+ end
+
+ test "sets force_overwrites runner option", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, opts, _ot, _addl ->
+ assert :force_overwrites in opts
+ refute :no_force_overwrites in opts
+
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
perform_job(MediaDownloadWorker, %{id: media_item.id, force: true})
@@ -216,25 +308,128 @@ defmodule Pinchflat.Downloading.MediaDownloadWorkerTest do
describe "perform/1 when testing re-downloads" do
test "sets redownloaded_at on the media_item", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
- end)
-
perform_job(MediaDownloadWorker, %{id: media_item.id, quality_upgrade?: true})
media_item = Repo.reload(media_item)
assert media_item.media_redownloaded_at != nil
end
- test "sets force_overwrites runner option", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot, _addl ->
- assert :force_overwrites in opts
- refute :no_force_overwrites in opts
+ test "ignores whether the media item is pending when re-downloaded", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
+ end)
- {:ok, render_metadata(:media_metadata)}
+ Media.update_media_item(media_item, %{media_filepath: "foo.mp4"})
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id, quality_upgrade?: true})
+ end
+
+ test "doesn't redownload if the source is set to not download", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> :ok end)
+
+ Sources.update_source(media_item.source, %{download_media: false})
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id, quality_upgrade?: true})
+ end
+
+ test "doesn't redownload if the media item is set to not download", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> :ok end)
+
+ Media.update_media_item(media_item, %{prevent_download: true})
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id, quality_upgrade?: true})
+ end
+
+ test "sets force_overwrites runner option", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, opts, _ot, _addl ->
+ assert :force_overwrites in opts
+ refute :no_force_overwrites in opts
+
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
perform_job(MediaDownloadWorker, %{id: media_item.id, force: true})
end
+
+ test "deletes old files if the media item has been updated" do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, _addl ->
+ tmp_media_item = media_item_with_attachments()
+ metadata = render_parsed_metadata(:media_metadata)
+ metadata = Map.put(metadata, "filepath", tmp_media_item.media_filepath)
+
+ {:ok, Phoenix.json_library().encode!(metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ old_media_item = media_item_with_attachments()
+ perform_job(MediaDownloadWorker, %{id: old_media_item.id, force: true})
+ updated_media_item = Repo.reload(old_media_item)
+
+ assert updated_media_item.media_filepath != old_media_item.media_filepath
+ refute File.exists?(old_media_item.media_filepath)
+ assert File.exists?(updated_media_item.media_filepath)
+ end
+ end
+
+ describe "perform/1 when testing user script callbacks" do
+ test "calls the media_pre_download user script runner", %{media_item: media_item} do
+ expect(UserScriptRunnerMock, :run, fn :media_pre_download, data ->
+ assert data.id == media_item.id
+
+ {:ok, "", 0}
+ end)
+
+ expect(UserScriptRunnerMock, :run, fn :media_downloaded, _ -> {:ok, "", 0} end)
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id})
+ end
+
+ test "does not download the media if the pre-download script returns an error", %{media_item: media_item} do
+ expect(UserScriptRunnerMock, :run, fn :media_pre_download, _ -> {:ok, "", 1} end)
+
+ assert :ok = perform_job(MediaDownloadWorker, %{id: media_item.id})
+ media_item = Repo.reload!(media_item)
+
+ refute media_item.media_filepath
+ assert media_item.prevent_download
+ end
+
+ test "downloads media if the pre-download script is not present", %{media_item: media_item} do
+ expect(UserScriptRunnerMock, :run, fn :media_pre_download, _ -> {:ok, :no_executable} end)
+ expect(UserScriptRunnerMock, :run, fn :media_downloaded, _ -> {:ok, :no_executable} end)
+
+ assert :ok = perform_job(MediaDownloadWorker, %{id: media_item.id})
+ media_item = Repo.reload!(media_item)
+
+ assert media_item.media_filepath
+ refute media_item.prevent_download
+ end
+
+ test "calls the media_downloaded user script runner", %{media_item: media_item} do
+ expect(UserScriptRunnerMock, :run, fn :media_pre_download, _ -> {:ok, "", 0} end)
+
+ expect(UserScriptRunnerMock, :run, fn :media_downloaded, data ->
+ assert data.id == media_item.id
+
+ {:ok, "", 0}
+ end)
+
+ perform_job(MediaDownloadWorker, %{id: media_item.id})
+ end
end
end
diff --git a/test/pinchflat/downloading/media_downloader_test.exs b/test/pinchflat/downloading/media_downloader_test.exs
index 0aa5a8b..eb66b99 100644
--- a/test/pinchflat/downloading/media_downloader_test.exs
+++ b/test/pinchflat/downloading/media_downloader_test.exs
@@ -5,6 +5,7 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
import Pinchflat.SourcesFixtures
import Pinchflat.ProfilesFixtures
+ alias Pinchflat.Media
alias Pinchflat.Downloading.MediaDownloader
setup do
@@ -15,28 +16,36 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
)
stub(HTTPClientMock, :get, fn _url, _headers, _opts -> {:ok, ""} end)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
{:ok, %{media_item: media_item}}
end
describe "download_for_media_item/3" do
- test "it calls the backend runner", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn url, _opts, ot, addl ->
- assert url == media_item.original_url
- assert ot == "after_move:%()j"
- assert [{:output_filepath, filepath}] = addl
- assert is_binary(filepath)
+ test "calls the backend runner", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- {:ok, render_metadata(:media_metadata)}
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+
+ url, :download, _opts, ot, addl ->
+ assert url == media_item.original_url
+ assert ot == "after_move:%()j"
+ assert [{:output_filepath, filepath} | _] = addl
+ assert is_binary(filepath)
+
+ {:ok, render_metadata(:media_metadata)}
end)
assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
end
- test "it saves the metadata filepath to the database", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
+ test "saves the metadata filepath to the database", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
end)
assert is_nil(media_item.metadata)
@@ -46,31 +55,85 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
assert updated_media_item.metadata.thumbnail_filepath =~ "media_items/#{media_item.id}/thumbnail.jpg"
end
- test "non-recoverable errors are passed through", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:error, :some_error, 1}
+ test "errors for non-downloadable media are passed through", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "is_live"})}
end)
- assert {:error, :some_error} = MediaDownloader.download_for_media_item(media_item)
+ assert {:error, :unsuitable_for_download, message} = MediaDownloader.download_for_media_item(media_item)
+ assert message =~ "Media item ##{media_item.id} isn't suitable for download yet."
+ end
+
+ test "non-recoverable errors are passed through", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, :some_error, 1}
+ end)
+
+ assert {:error, :download_failed, :some_error} = MediaDownloader.download_for_media_item(media_item)
end
test "unknown errors are passed through", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:error, :some_error}
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, :some_error}
end)
- assert {:error, message} = MediaDownloader.download_for_media_item(media_item)
+ assert {:error, :unknown, message} = MediaDownloader.download_for_media_item(media_item)
assert message == "Unknown error: {:error, :some_error}"
end
end
+ describe "download_for_media_item/3 when testing non-downloadable media" do
+ test "calls the download runner if the media is currently downloadable", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "was_live"})}
+
+ _url, :download, _opts, _ot, _addl ->
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not call the download runner if the media is not downloadable", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "is_live"})}
+ end)
+
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :download, _opts, _ot, _addl -> {:ok, ""} end)
+
+ assert {:error, :unsuitable_for_download, message} = MediaDownloader.download_for_media_item(media_item)
+ assert message =~ "Media item ##{media_item.id} isn't suitable for download yet."
+ end
+
+ test "returns unexpected errors from the download status determination method", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl -> {:error, :what_tha} end)
+
+ assert {:error, :unknown, "Unknown error: {:error, :what_tha}"} =
+ MediaDownloader.download_for_media_item(media_item)
+ end
+ end
+
describe "download_for_media_item/3 when testing override options" do
test "includes override opts if specified", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot, _addl ->
- refute :force_overwrites in opts
- assert :no_force_overwrites in opts
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- {:ok, render_metadata(:media_metadata)}
+ _url, :download, opts, _ot, _addl ->
+ refute :force_overwrites in opts
+ assert :no_force_overwrites in opts
+
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
override_opts = [overwrite_behaviour: :no_force_overwrites]
@@ -79,93 +142,297 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
end
end
- describe "download_for_media_item/3 when testing retries" do
+ describe "download_for_media_item/3 when testing cookie usage" do
+ test "sets use_cookies if the source uses cookies" do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not set use_cookies if the source does not use cookies" do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :disabled})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
+ end
+ end
+
+ describe "download_for_media_item/3 when testing non-cookie retries" do
test "returns a recovered tuple on recoverable errors", %{media_item: media_item} do
message = "Unable to communicate with SponsorBlock"
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:error, message, 1}
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, addl ->
+ [{:output_filepath, filepath} | _] = addl
+ File.write(filepath, render_metadata(:media_metadata))
+
+ {:error, message, 1}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
- assert {:recovered, ^message} = MediaDownloader.download_for_media_item(media_item)
+ assert {:recovered, _media_item, ^message} = MediaDownloader.download_for_media_item(media_item)
end
test "attempts to update the media item on recoverable errors", %{media_item: media_item} do
message = "Unable to communicate with SponsorBlock"
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl ->
- [{:output_filepath, filepath}] = addl
- File.write(filepath, render_metadata(:media_metadata))
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :download, _opts, _ot, addl ->
+ [{:output_filepath, filepath} | _] = addl
+ File.write(filepath, render_metadata(:media_metadata))
- {:error, message, 1}
+ {:error, message, 1}
+
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
- assert {:recovered, ^message} = MediaDownloader.download_for_media_item(media_item)
+ assert {:recovered, updated_media_item, ^message} = MediaDownloader.download_for_media_item(media_item)
+
+ assert DateTime.diff(DateTime.utc_now(), updated_media_item.media_downloaded_at) < 2
+ assert String.ends_with?(updated_media_item.media_filepath, ".mkv")
+ end
+
+ test "returns an unrecoverable tuple if recovery fails", %{media_item: media_item} do
+ message = "Unable to communicate with SponsorBlock"
+
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, _addl ->
+ # This errors because the metadata is not written to the file so JSON parsing fails
+ {:error, message, 1}
+ end)
+
+ assert {:error, :unrecoverable, ^message} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "sets the last_error appropriately when recovered", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :download, _opts, _ot, addl ->
+ [{:output_filepath, filepath} | _] = addl
+ File.write(filepath, render_metadata(:media_metadata))
+
+ {:error, "Unable to communicate with SponsorBlock", 1}
+
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ assert {:recovered, updated_media_item, _message} = MediaDownloader.download_for_media_item(media_item)
+ assert updated_media_item.last_error == "Unable to communicate with SponsorBlock"
+ end
+
+ test "sets the last_error appropriately when unrecoverable", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, _addl ->
+ {:error, "Unable to communicate with SponsorBlock", 1}
+ end)
+
+ assert {:error, :unrecoverable, _message} = MediaDownloader.download_for_media_item(media_item)
media_item = Repo.reload(media_item)
- assert DateTime.diff(DateTime.utc_now(), media_item.media_downloaded_at) < 2
- assert String.ends_with?(media_item.media_filepath, ".mkv")
+ assert media_item.last_error == "Unable to communicate with SponsorBlock"
+ end
+ end
+
+ describe "download_for_media_item/3 when testing cookie retries" do
+ test "retries with cookies if we think it would help and the source allows" do
+ expect(YtDlpRunnerMock, :run, 4, fn
+ _url, :get_downloadable_status, _opts, _ot, [use_cookies: false] ->
+ {:error, "Sign in to confirm your age", 1}
+
+ _url, :get_downloadable_status, _opts, _ot, [use_cookies: true] ->
+ {:ok, "{}"}
+
+ _url, :download, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, render_metadata(:media_metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:ok, _} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not retry with cookies if we don't think it would help even the source allows" do
+ expect(YtDlpRunnerMock, :run, 1, fn
+ _url, :get_downloadable_status, _opts, _ot, [use_cookies: false] ->
+ {:error, "Some other error", 1}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:error, :download_failed, "Some other error"} = MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not retry with cookies even if we think it would help but source doesn't allow" do
+ expect(YtDlpRunnerMock, :run, 1, fn
+ _url, :get_downloadable_status, _opts, _ot, [use_cookies: false] ->
+ {:error, "Sign in to confirm your age", 1}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :disabled})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:error, :download_failed, "Sign in to confirm your age"} =
+ MediaDownloader.download_for_media_item(media_item)
+ end
+
+ test "does not retry with cookies if cookies were already used" do
+ expect(YtDlpRunnerMock, :run, 1, fn
+ _url, :get_downloadable_status, _opts, _ot, [use_cookies: true] ->
+ {:error, "This video is available to this channel's members", 1}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+ media_item = media_item_fixture(%{source_id: source.id})
+
+ assert {:error, :download_failed, "This video is available to this channel's members"} =
+ MediaDownloader.download_for_media_item(media_item)
end
end
describe "download_for_media_item/3 when testing media_item attributes" do
setup do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
end)
:ok
end
- test "it sets the media_downloaded_at", %{media_item: media_item} do
+ test "sets the media_downloaded_at", %{media_item: media_item} do
assert media_item.media_downloaded_at == nil
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert DateTime.diff(DateTime.utc_now(), updated_media_item.media_downloaded_at) < 2
end
- test "it extracts the title", %{media_item: media_item} do
+ test "sets the culled_at to nil", %{media_item: media_item} do
+ Media.update_media_item(media_item, %{culled_at: DateTime.utc_now()})
+ assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
+ assert updated_media_item.culled_at == nil
+ end
+
+ test "extracts the title", %{media_item: media_item} do
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert updated_media_item.title == "Pinchflat Example Video"
end
- test "it extracts the description", %{media_item: media_item} do
+ test "extracts the description", %{media_item: media_item} do
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert is_binary(updated_media_item.description)
end
- test "it extracts the media_filepath", %{media_item: media_item} do
+ test "extracts the media_filepath", %{media_item: media_item} do
assert media_item.media_filepath == nil
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert String.ends_with?(updated_media_item.media_filepath, ".mkv")
end
- test "it extracts the subtitle_filepaths", %{media_item: media_item} do
+ test "extracts the subtitle_filepaths", %{media_item: media_item} do
assert media_item.subtitle_filepaths == []
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert [["de", _], ["en", _] | _rest] = updated_media_item.subtitle_filepaths
end
- test "it extracts the duration_seconds", %{media_item: media_item} do
+ test "extracts the duration_seconds", %{media_item: media_item} do
assert media_item.duration_seconds == nil
assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
assert is_integer(updated_media_item.duration_seconds)
end
- test "it extracts the thumbnail_filepath", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- metadata = render_parsed_metadata(:media_metadata)
+ test "extracts the thumbnail_filepath", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- thumbnail_filepath =
- metadata["thumbnails"]
- |> Enum.reverse()
- |> Enum.find_value(fn attrs -> attrs["filepath"] end)
- |> String.split(~r{\.}, include_captures: true)
- |> List.insert_at(-3, "-thumb")
- |> Enum.join()
+ _url, :download, _opts, _ot, _addl ->
+ metadata = render_parsed_metadata(:media_metadata)
- :ok = File.cp(thumbnail_filepath_fixture(), thumbnail_filepath)
+ thumbnail_filepath =
+ metadata["thumbnails"]
+ |> Enum.reverse()
+ |> Enum.find_value(fn attrs -> attrs["filepath"] end)
+ |> String.split(~r{\.}, include_captures: true)
+ |> List.insert_at(-3, "-thumb")
+ |> Enum.join()
- {:ok, Phoenix.json_library().encode!(metadata)}
+ :ok = File.cp(thumbnail_filepath_fixture(), thumbnail_filepath)
+
+ {:ok, Phoenix.json_library().encode!(metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
assert media_item.thumbnail_filepath == nil
@@ -175,14 +442,20 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
File.rm(updated_media_item.thumbnail_filepath)
end
- test "it extracts the metadata_filepath", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- metadata = render_parsed_metadata(:media_metadata)
+ test "extracts the metadata_filepath", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 3, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, "{}"}
- infojson_filepath = metadata["infojson_filename"]
- :ok = File.cp(infojson_filepath_fixture(), infojson_filepath)
+ _url, :download, _opts, _ot, _addl ->
+ metadata = render_parsed_metadata(:media_metadata)
+ infojson_filepath = metadata["infojson_filename"]
+ :ok = File.cp(infojson_filepath_fixture(), infojson_filepath)
- {:ok, Phoenix.json_library().encode!(metadata)}
+ {:ok, Phoenix.json_library().encode!(metadata)}
+
+ _url, :download_thumbnail, _opts, _ot, _addl ->
+ {:ok, ""}
end)
assert media_item.metadata_filepath == nil
@@ -191,18 +464,39 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
File.rm(updated_media_item.metadata_filepath)
end
+
+ test "sets the last_error to nil on success" do
+ media_item = media_item_fixture(%{last_error: "Some error"})
+
+ assert {:ok, updated_media_item} = MediaDownloader.download_for_media_item(media_item)
+ assert updated_media_item.last_error == nil
+ end
+
+ test "sets the last_error to the error message on failure", %{media_item: media_item} do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download, _opts, _ot, _addl -> {:error, :some_error}
+ end)
+
+ assert {:error, :unknown, _message} = MediaDownloader.download_for_media_item(media_item)
+ media_item = Repo.reload(media_item)
+
+ assert media_item.last_error == "Unknown error: {:error, :some_error}"
+ end
end
describe "download_for_media_item/3 when testing NFO generation" do
setup do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
- {:ok, render_metadata(:media_metadata)}
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :download, _opts, _ot, _addl -> {:ok, render_metadata(:media_metadata)}
+ _url, :get_downloadable_status, _opts, _ot, _addl -> {:ok, "{}"}
+ _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""}
end)
:ok
end
- test "it generates an NFO file if the source is set to download NFOs" do
+ test "generates an NFO file if the source is set to download NFOs" do
profile = media_profile_fixture(%{download_nfo: true})
source = source_fixture(%{media_profile_id: profile.id})
media_item = media_item_fixture(%{source_id: source.id})
@@ -215,7 +509,7 @@ defmodule Pinchflat.Downloading.MediaDownloaderTest do
File.rm!(updated_media_item.nfo_filepath)
end
- test "it does not generate an NFO file if the source is set to not download NFOs" do
+ test "does not generate an NFO file if the source is set to not download NFOs" do
profile = media_profile_fixture(%{download_nfo: false})
source = source_fixture(%{media_profile_id: profile.id})
media_item = media_item_fixture(%{source_id: source.id})
diff --git a/test/pinchflat/downloading/media_retention_worker_test.exs b/test/pinchflat/downloading/media_retention_worker_test.exs
index 85ffdd9..5ddec69 100644
--- a/test/pinchflat/downloading/media_retention_worker_test.exs
+++ b/test/pinchflat/downloading/media_retention_worker_test.exs
@@ -7,15 +7,36 @@ defmodule Pinchflat.Downloading.MediaRetentionWorkerTest do
alias Pinchflat.Media
alias Pinchflat.Downloading.MediaRetentionWorker
- describe "perform/1" do
- setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ setup do
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
- :ok
+ :ok
+ end
+
+ describe "perform/1" do
+ test "sets deleted media to not re-download" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date()
+
+ perform_job(MediaRetentionWorker, %{})
+
+ refute Repo.reload!(new_media_item).prevent_download
+ assert Repo.reload!(old_media_item).prevent_download
end
+ test "sets culled_at timestamp on deleted media" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date()
+
+ perform_job(MediaRetentionWorker, %{})
+
+ refute Repo.reload!(new_media_item).culled_at
+ assert Repo.reload!(old_media_item).culled_at
+ assert DateTime.diff(now(), Repo.reload!(old_media_item).culled_at) < 1
+ end
+ end
+
+ describe "perform/1 when testing retention_period-based culling" do
test "deletes media files that are past their retention date" do
- {_source, old_media_item, new_media_item} = prepare_records()
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date()
perform_job(MediaRetentionWorker, %{})
@@ -25,27 +46,50 @@ defmodule Pinchflat.Downloading.MediaRetentionWorkerTest do
refute Repo.reload!(old_media_item).media_filepath
end
- test "sets deleted media to not re-download" do
- {_source, old_media_item, new_media_item} = prepare_records()
+ test "deletes media files that are on their retention date per the 24-h clock" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date(2)
+
+ just_over_two_days_ago = now_minus(2, :days) |> DateTime.add(-1, :minute)
+ just_under_two_days_ago = now_minus(2, :days) |> DateTime.add(1, :minute)
+
+ Media.update_media_item(old_media_item, %{media_downloaded_at: just_over_two_days_ago})
+ Media.update_media_item(new_media_item, %{media_downloaded_at: just_under_two_days_ago})
perform_job(MediaRetentionWorker, %{})
- refute Repo.reload!(new_media_item).prevent_download
- assert Repo.reload!(old_media_item).prevent_download
+ assert File.exists?(new_media_item.media_filepath)
+ refute File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(new_media_item).media_filepath
+ refute Repo.reload!(old_media_item).media_filepath
end
- test "sets culled_at timestamp on deleted media" do
- {_source, old_media_item, new_media_item} = prepare_records()
+ test "sets culled_at and prevent_download" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date()
perform_job(MediaRetentionWorker, %{})
refute Repo.reload!(new_media_item).culled_at
assert Repo.reload!(old_media_item).culled_at
- assert DateTime.diff(now(), Repo.reload!(old_media_item).culled_at) < 1
+ refute Repo.reload!(new_media_item).prevent_download
+ assert Repo.reload!(old_media_item).prevent_download
+ end
+
+ test "doesn't cull if the source doesn't have a retention period" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_retention_date(nil)
+
+ perform_job(MediaRetentionWorker, %{})
+
+ assert File.exists?(new_media_item.media_filepath)
+ assert File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(new_media_item).media_filepath
+ assert Repo.reload!(old_media_item).media_filepath
+
+ refute Repo.reload!(new_media_item).culled_at
+ refute Repo.reload!(old_media_item).culled_at
end
test "doesn't cull media items that have prevent_culling set" do
- {_source, old_media_item, _new_media_item} = prepare_records()
+ {_source, old_media_item, _new_media_item} = prepare_records_for_retention_date()
Media.update_media_item(old_media_item, %{prevent_culling: true})
@@ -53,11 +97,101 @@ defmodule Pinchflat.Downloading.MediaRetentionWorkerTest do
assert File.exists?(old_media_item.media_filepath)
assert Repo.reload!(old_media_item).media_filepath
+ refute Repo.reload!(old_media_item).culled_at
+ end
+
+ test "doesn't cull if the media item has no media_filepath" do
+ {_source, old_media_item, _new_media_item} = prepare_records_for_retention_date()
+
+ Media.update_media_item(old_media_item, %{media_filepath: nil})
+
+ perform_job(MediaRetentionWorker, %{})
+
+ refute Repo.reload!(old_media_item).culled_at
end
end
- defp prepare_records do
- source = source_fixture(%{retention_period_days: 2})
+ describe "perform/1 when testing source cutoff-based culling" do
+ test "culls media from before the cutoff date" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_source_cutoff_date()
+
+ perform_job(MediaRetentionWorker, %{})
+
+ assert File.exists?(new_media_item.media_filepath)
+ refute File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(new_media_item).media_filepath
+ refute Repo.reload!(old_media_item).media_filepath
+ end
+
+ # NOTE: Since this is a date and not a datetime, we can't add logic to have to-the-minute
+ # comparison like we can with retention periods. We can only compare to the day.
+ test "doesn't cull media from on or after the cutoff date" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_source_cutoff_date(2)
+
+ Media.update_media_item(old_media_item, %{uploaded_at: now_minus(2, :days)})
+ Media.update_media_item(new_media_item, %{uploaded_at: now_minus(1, :day)})
+
+ perform_job(MediaRetentionWorker, %{})
+
+ assert File.exists?(new_media_item.media_filepath)
+ assert File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(new_media_item).media_filepath
+ assert Repo.reload!(old_media_item).media_filepath
+
+ refute Repo.reload!(new_media_item).culled_at
+ refute Repo.reload!(old_media_item).culled_at
+ end
+
+ test "sets culled_at but not prevent_download" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_source_cutoff_date()
+
+ perform_job(MediaRetentionWorker, %{})
+
+ refute Repo.reload!(new_media_item).culled_at
+ assert Repo.reload!(old_media_item).culled_at
+ refute Repo.reload!(new_media_item).prevent_download
+ refute Repo.reload!(old_media_item).prevent_download
+ end
+
+ test "doesn't cull media if the source doesn't have a cutoff date" do
+ {_source, old_media_item, new_media_item} = prepare_records_for_source_cutoff_date(nil)
+
+ perform_job(MediaRetentionWorker, %{})
+
+ assert File.exists?(new_media_item.media_filepath)
+ assert File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(new_media_item).media_filepath
+ assert Repo.reload!(old_media_item).media_filepath
+
+ refute Repo.reload!(new_media_item).culled_at
+ refute Repo.reload!(old_media_item).culled_at
+ end
+
+ test "doesn't cull media items that have prevent_culling set" do
+ {_source, old_media_item, _new_media_item} = prepare_records_for_source_cutoff_date()
+
+ Media.update_media_item(old_media_item, %{prevent_culling: true})
+
+ perform_job(MediaRetentionWorker, %{})
+
+ assert File.exists?(old_media_item.media_filepath)
+ assert Repo.reload!(old_media_item).media_filepath
+ refute Repo.reload!(old_media_item).culled_at
+ end
+
+ test "doesn't cull if the media item has no media_filepath" do
+ {_source, old_media_item, _new_media_item} = prepare_records_for_source_cutoff_date()
+
+ Media.update_media_item(old_media_item, %{media_filepath: nil})
+
+ perform_job(MediaRetentionWorker, %{})
+
+ refute Repo.reload!(old_media_item).culled_at
+ end
+ end
+
+ defp prepare_records_for_retention_date(retention_period_days \\ 2) do
+ source = source_fixture(%{retention_period_days: retention_period_days})
old_media_item =
media_item_with_attachments(%{
@@ -73,4 +207,23 @@ defmodule Pinchflat.Downloading.MediaRetentionWorkerTest do
{source, old_media_item, new_media_item}
end
+
+ defp prepare_records_for_source_cutoff_date(download_cutoff_date_days_ago \\ 2) do
+ cutoff_date = if download_cutoff_date_days_ago, do: now_minus(download_cutoff_date_days_ago, :days), else: nil
+ source = source_fixture(%{download_cutoff_date: cutoff_date})
+
+ old_media_item =
+ media_item_with_attachments(%{
+ source_id: source.id,
+ uploaded_at: now_minus(3, :days)
+ })
+
+ new_media_item =
+ media_item_with_attachments(%{
+ source_id: source.id,
+ uploaded_at: now_minus(1, :day)
+ })
+
+ {source, old_media_item, new_media_item}
+ end
end
diff --git a/test/pinchflat/downloading/quality_option_builder_test.exs b/test/pinchflat/downloading/quality_option_builder_test.exs
new file mode 100644
index 0000000..f370b83
--- /dev/null
+++ b/test/pinchflat/downloading/quality_option_builder_test.exs
@@ -0,0 +1,109 @@
+defmodule Pinchflat.Downloading.QualityOptionBuilderTest do
+ use Pinchflat.DataCase
+ import Pinchflat.ProfilesFixtures
+
+ alias Pinchflat.Profiles
+ alias Pinchflat.Settings
+ alias Pinchflat.Downloading.QualityOptionBuilder
+
+ describe "build/1" do
+ test "includes format options if audio_track is set to original" do
+ media_profile = media_profile_fixture(%{audio_track: "original"})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format, "bestvideo+bestaudio[format_note*=original]/bestvideo*+bestaudio/best"} in res
+ end
+
+ test "includes format options if audio_track is set to default" do
+ media_profile = media_profile_fixture(%{audio_track: "default"})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format, "bestvideo+bestaudio[format_note*='(default)']/bestvideo*+bestaudio/best"} in res
+ end
+
+ test "includes format options if audio_track is set to a language code" do
+ media_profile = media_profile_fixture(%{audio_track: "en"})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format, "bestvideo+bestaudio[language^=en]/bestvideo*+bestaudio/best"} in res
+ end
+ end
+
+ describe "build/1 when testing audio profiles" do
+ setup do
+ {:ok, media_profile: media_profile_fixture(%{preferred_resolution: :audio})}
+ end
+
+ test "includes quality options for audio only", %{media_profile: media_profile} do
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert :extract_audio in res
+ assert {:format_sort, "+acodec:m4a"} in res
+
+ refute {:remux_video, "mp4"} in res
+ end
+
+ test "includes custom format target for audio if specified", %{media_profile: media_profile} do
+ {:ok, media_profile} =
+ Profiles.update_media_profile(media_profile, %{media_container: "flac", preferred_resolution: :audio})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:audio_format, "flac"} in res
+ end
+
+ test "includes custom format options", %{media_profile: media_profile} do
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format, "bestaudio/best"} in res
+ end
+ end
+
+ describe "build/1 when testing non-audio profiles" do
+ setup do
+ {:ok, media_profile: media_profile_fixture(%{preferred_resolution: :"480p"})}
+ end
+
+ test "includes quality options" do
+ resolutions = ["360", "480", "720", "1080", "1440", "2160", "4320"]
+
+ Enum.each(resolutions, fn resolution ->
+ resolution_atom = String.to_existing_atom(resolution <> "p")
+ media_profile = media_profile_fixture(%{preferred_resolution: resolution_atom})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format_sort, "res:#{resolution},+codec:avc:m4a"} in res
+ assert {:remux_video, "mp4"} in res
+ end)
+ end
+
+ test "includes custom quality options if specified", %{media_profile: media_profile} do
+ Settings.set(video_codec_preference: "av01")
+ Settings.set(audio_codec_preference: "aac")
+
+ {:ok, media_profile} = Profiles.update_media_profile(media_profile, %{preferred_resolution: :"1080p"})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format_sort, "res:1080,+codec:av01:aac"} in res
+ end
+
+ test "includes custom remux target for videos if specified", %{media_profile: media_profile} do
+ {:ok, media_profile} = Profiles.update_media_profile(media_profile, %{media_container: "mkv"})
+
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:remux_video, "mkv"} in res
+ end
+
+ test "includes custom format options", %{media_profile: media_profile} do
+ assert res = QualityOptionBuilder.build(media_profile)
+
+ assert {:format, "bestvideo*+bestaudio/best"} in res
+ end
+ end
+end
diff --git a/test/pinchflat/fast_indexing/fast_indexing_helpers_test.exs b/test/pinchflat/fast_indexing/fast_indexing_helpers_test.exs
index 46f4226..77be803 100644
--- a/test/pinchflat/fast_indexing/fast_indexing_helpers_test.exs
+++ b/test/pinchflat/fast_indexing/fast_indexing_helpers_test.exs
@@ -1,6 +1,7 @@
defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
use Pinchflat.DataCase
+ import Pinchflat.TasksFixtures
import Pinchflat.MediaFixtures
import Pinchflat.SourcesFixtures
import Pinchflat.ProfilesFixtures
@@ -8,47 +9,77 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
alias Pinchflat.Tasks
alias Pinchflat.Settings
alias Pinchflat.Media.MediaItem
+ alias Pinchflat.FastIndexing.FastIndexingWorker
alias Pinchflat.Downloading.MediaDownloadWorker
alias Pinchflat.FastIndexing.FastIndexingHelpers
setup do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
{:ok, media_attributes_return_fixture()}
end)
{:ok, [source: source_fixture()]}
end
- describe "kickoff_download_tasks_from_youtube_rss_feed/1" do
- test "enqueues a new worker for each new media_id in the source's RSS feed", %{source: source} do
+ describe "kickoff_indexing_task/1" do
+ test "deletes any existing fast indexing tasks", %{source: source} do
+ {:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
+ task = task_fixture(source_id: source.id, job_id: job.id)
+
+ assert Repo.reload!(task)
+ assert {:ok, _} = FastIndexingHelpers.kickoff_indexing_task(source)
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
+ end
+
+ test "kicks off a new fast indexing task", %{source: source} do
+ assert {:ok, _} = FastIndexingHelpers.kickoff_indexing_task(source)
+ assert [worker] = all_enqueued(worker: FastIndexingWorker)
+ assert worker.args["id"] == source.id
+ end
+ end
+
+ describe "index_and_kickoff_downloads/1" do
+ test "enqueues a worker for each new media_id in the source's RSS feed", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- assert [media_item] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [media_item] = FastIndexingHelpers.index_and_kickoff_downloads(source)
assert [worker] = all_enqueued(worker: MediaDownloadWorker)
assert worker.args["id"] == media_item.id
+ assert worker.priority == 0
end
test "does not enqueue a new worker for the source's media IDs we already know about", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
media_item_fixture(source_id: source.id, media_id: "test_1")
- assert [] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [] = FastIndexingHelpers.index_and_kickoff_downloads(source)
refute_enqueued(worker: MediaDownloadWorker)
end
+ test "kicks off a download task for all pending media but at a lower priority", %{source: source} do
+ pending_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
+
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
+
+ assert [worker_1, _worker_2] = all_enqueued(worker: MediaDownloadWorker)
+ assert worker_1.args["id"] == pending_item.id
+ assert worker_1.priority == 1
+ end
+
test "returns the found media items", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
test "does not enqueue a download job if the source does not allow it" do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
source = source_fixture(%{download_media: false})
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
refute_enqueued(worker: MediaDownloadWorker)
end
@@ -56,24 +87,36 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
test "creates a download task record", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- assert [media_item] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [media_item] = FastIndexingHelpers.index_and_kickoff_downloads(source)
assert [_] = Tasks.list_tasks_for(media_item, "MediaDownloadWorker")
end
+ test "passes the source's download options to the yt-dlp runner", %{source: source} do
+ expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, opts, _ot, _addl_opts ->
+ assert {:output, "/tmp/test/media/%(title)S.%(ext)S"} in opts
+ assert {:remux_video, "mp4"} in opts
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ FastIndexingHelpers.index_and_kickoff_downloads(source)
+ end
+
test "does not enqueue a download job if the media item does not match the format rules" do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
profile = media_profile_fixture(%{shorts_behaviour: :exclude})
source = source_fixture(%{media_profile_id: profile.id})
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
output =
Phoenix.json_library().encode!(%{
id: "video2",
title: "Video 2",
- webpage_url: "https://example.com/shorts/video2",
- was_live: true,
+ original_url: "https://example.com/shorts/video2",
+ live_status: "is_live",
description: "desc2",
aspect_ratio: 1.67,
duration: 345.67,
@@ -83,7 +126,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
{:ok, output}
end)
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
refute_enqueued(worker: MediaDownloadWorker)
end
@@ -91,25 +134,69 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
test "does not blow up if a media item cannot be created", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
{:ok, "{}"}
end)
- assert [] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
test "does not blow up if a media item causes a yt-dlp error", %{source: source} do
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
{:error, "message", 1}
end)
- assert [] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
end
- describe "kickoff_download_tasks_from_youtube_rss_feed/1 when testing backends" do
+ describe "index_and_kickoff_downloads/1 when testing cookies" do
+ test "sets use_cookies if the source uses cookies" do
+ expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
+
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
+
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
+ end
+
+ test "does not set use_cookies if the source does not use cookies" do
+ expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
+
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :disabled})
+
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
+ end
+ end
+
+ describe "index_and_kickoff_downloads/1 when testing backends" do
test "uses the YouTube API if it is enabled", %{source: source} do
expect(HTTPClientMock, :get, fn url, _headers ->
assert url =~ "https://youtube.googleapis.com/youtube/v3/playlistItems"
@@ -119,7 +206,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
Settings.set(youtube_api_key: "test_key")
- assert [] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
test "the YouTube API creates records as expected", %{source: source} do
@@ -129,7 +216,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
Settings.set(youtube_api_key: "test_key")
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
test "RSS is used as a backup if the API fails", %{source: source} do
@@ -138,7 +225,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
Settings.set(youtube_api_key: "test_key")
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
test "RSS is used if the API is not enabled", %{source: source} do
@@ -150,7 +237,7 @@ defmodule Pinchflat.FastIndexing.FastIndexingHelpersTest do
Settings.set(youtube_api_key: nil)
- assert [%MediaItem{}] = FastIndexingHelpers.kickoff_download_tasks_from_youtube_rss_feed(source)
+ assert [%MediaItem{}] = FastIndexingHelpers.index_and_kickoff_downloads(source)
end
end
end
diff --git a/test/pinchflat/fast_indexing/fast_indexing_worker_test.exs b/test/pinchflat/fast_indexing/fast_indexing_worker_test.exs
index 40e3805..154f5ab 100644
--- a/test/pinchflat/fast_indexing/fast_indexing_worker_test.exs
+++ b/test/pinchflat/fast_indexing/fast_indexing_worker_test.exs
@@ -84,7 +84,10 @@ defmodule Pinchflat.FastIndexing.FastIndexingWorkerTest do
source = source_fixture(fast_index: true)
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, render_metadata(:media_metadata)} end)
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
+ {:ok, render_metadata(:media_metadata)}
+ end)
expect(AppriseRunnerMock, :run, fn servers, opts ->
assert "server_1" = servers
@@ -110,7 +113,11 @@ defmodule Pinchflat.FastIndexing.FastIndexingWorkerTest do
source = source_fixture(fast_index: true, download_media: false)
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, render_metadata(:media_metadata)} end)
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
+ {:ok, render_metadata(:media_metadata)}
+ end)
+
expect(AppriseRunnerMock, :run, 0, fn _servers, _opts -> {:ok, ""} end)
perform_job(FastIndexingWorker, %{id: source.id})
@@ -120,7 +127,11 @@ defmodule Pinchflat.FastIndexing.FastIndexingWorkerTest do
source = source_fixture(fast_index: true, title_filter_regex: "foobar")
expect(HTTPClientMock, :get, fn _url -> {:ok, "test_1 "} end)
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, render_metadata(:media_metadata)} end)
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
+ {:ok, render_metadata(:media_metadata)}
+ end)
+
expect(AppriseRunnerMock, :run, 0, fn _servers, _opts -> {:ok, ""} end)
perform_job(FastIndexingWorker, %{id: source.id})
diff --git a/test/pinchflat/fast_indexing/youtube_api_test.exs b/test/pinchflat/fast_indexing/youtube_api_test.exs
index cc310d6..dde4068 100644
--- a/test/pinchflat/fast_indexing/youtube_api_test.exs
+++ b/test/pinchflat/fast_indexing/youtube_api_test.exs
@@ -7,31 +7,67 @@ defmodule Pinchflat.FastIndexing.YoutubeApiTest do
alias Pinchflat.FastIndexing.YoutubeApi
describe "enabled?/0" do
- test "returns true if the user has set a YouTube API key" do
+ test "returns true if the user has set YouTube API keys" do
+ Settings.set(youtube_api_key: "key1, key2")
+ assert YoutubeApi.enabled?()
+ end
+
+ test "returns true with a single API key" do
Settings.set(youtube_api_key: "test_key")
assert YoutubeApi.enabled?()
end
- test "returns false if the user has not set an API key" do
+ test "returns false if the user has not set any API keys" do
Settings.set(youtube_api_key: nil)
+ refute YoutubeApi.enabled?()
+ end
+ test "returns false if only empty or whitespace keys are provided" do
+ Settings.set(youtube_api_key: " , ,")
refute YoutubeApi.enabled?()
end
end
describe "get_recent_media_ids/1" do
setup do
+ case :global.whereis_name(YoutubeApi.KeyIndex) do
+ :undefined -> :ok
+ pid -> Agent.stop(pid)
+ end
+
source = source_fixture()
- Settings.set(youtube_api_key: "test_key")
+ Settings.set(youtube_api_key: "key1, key2")
{:ok, source: source}
end
+ test "rotates through API keys", %{source: source} do
+ expect(HTTPClientMock, :get, fn url, _headers ->
+ assert url =~ "key=key1"
+ {:ok, "{}"}
+ end)
+
+ expect(HTTPClientMock, :get, fn url, _headers ->
+ assert url =~ "key=key2"
+ {:ok, "{}"}
+ end)
+
+ expect(HTTPClientMock, :get, fn url, _headers ->
+ assert url =~ "key=key1"
+ {:ok, "{}"}
+ end)
+
+ # three calls to verify rotation
+ YoutubeApi.get_recent_media_ids(source)
+ YoutubeApi.get_recent_media_ids(source)
+ YoutubeApi.get_recent_media_ids(source)
+ end
+
test "calls the expected URL", %{source: source} do
expect(HTTPClientMock, :get, fn url, headers ->
api_base = "https://youtube.googleapis.com/youtube/v3/playlistItems"
- request_url = "#{api_base}?part=contentDetails&maxResults=50&playlistId=#{source.collection_id}&key=test_key"
+ request_url = "#{api_base}?part=contentDetails&maxResults=50&playlistId=#{source.collection_id}&key=key1"
assert url == request_url
assert headers == [accept: "application/json"]
diff --git a/test/pinchflat/lifecycle/user_scripts/command_runner_test.exs b/test/pinchflat/lifecycle/user_scripts/command_runner_test.exs
index 351f4c4..5a35faf 100644
--- a/test/pinchflat/lifecycle/user_scripts/command_runner_test.exs
+++ b/test/pinchflat/lifecycle/user_scripts/command_runner_test.exs
@@ -19,7 +19,7 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunnerTest do
File.write(filepath(), "#!/bin/bash\ntouch #{filename}\n")
refute File.exists?(filename)
- assert :ok = Runner.run(:media_downloaded, %{})
+ assert {:ok, _, _} = Runner.run(:media_downloaded, %{})
assert File.exists?(filename)
end
@@ -27,7 +27,7 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunnerTest do
tmp_dir = Application.get_env(:pinchflat, :tmpfile_directory)
File.write(filepath(), "#!/bin/bash\necho $1 > #{tmp_dir}/event_name\n")
- assert :ok = Runner.run(:media_downloaded, %{})
+ assert {:ok, _, _} = Runner.run(:media_downloaded, %{})
assert File.read!("#{tmp_dir}/event_name") == "media_downloaded\n"
end
@@ -35,26 +35,32 @@ defmodule Pinchflat.Lifecycle.UserScripts.CommandRunnerTest do
tmp_dir = Application.get_env(:pinchflat, :tmpfile_directory)
File.write(filepath(), "#!/bin/bash\necho $2 > #{tmp_dir}/encoded_data\n")
- assert :ok = Runner.run(:media_downloaded, %{foo: "bar"})
+ assert {:ok, _, _} = Runner.run(:media_downloaded, %{foo: "bar"})
assert File.read!("#{tmp_dir}/encoded_data") == "{\"foo\":\"bar\"}\n"
end
test "does nothing if the lifecycle file is not present" do
:ok = File.rm(filepath())
- assert :ok = Runner.run(:media_downloaded, %{})
+ assert {:ok, :no_executable} = Runner.run(:media_downloaded, %{})
end
test "does nothing if the lifecycle file is empty" do
File.write(filepath(), "")
- assert :ok = Runner.run(:media_downloaded, %{})
+ assert {:ok, :no_executable} = Runner.run(:media_downloaded, %{})
end
test "returns :ok if the command exits with a non-zero status" do
File.write(filepath(), "#!/bin/bash\nexit 1\n")
- assert :ok = Runner.run(:media_downloaded, %{})
+ assert {:ok, _, 1} = Runner.run(:media_downloaded, %{})
+ end
+
+ test "returns the output of the command" do
+ File.write(filepath(), "#!/bin/bash\necho 'hello'\n")
+
+ assert {:ok, "hello\n", 0} = Runner.run(:media_downloaded, %{})
end
test "gets upset if you pass an invalid event type" do
diff --git a/test/pinchflat/media/file_syncing_test.exs b/test/pinchflat/media/file_syncing_test.exs
new file mode 100644
index 0000000..c69c795
--- /dev/null
+++ b/test/pinchflat/media/file_syncing_test.exs
@@ -0,0 +1,122 @@
+defmodule Pinchflat.Media.FileSyncingTest do
+ use Pinchflat.DataCase
+
+ import Pinchflat.MediaFixtures
+
+ alias Pinchflat.Media.FileSyncing
+
+ describe "delete_outdated_files/2" do
+ test "deletes outdated non-subtitle files" do
+ new_media_item = media_item_with_attachments()
+ old_media_item = media_item_with_attachments()
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(new_media_item.media_filepath)
+ refute File.exists?(old_media_item.media_filepath)
+ end
+
+ test "doesn't delete non-subtitle files if the new file is the same" do
+ new_media_item = media_item_with_attachments()
+ old_media_item = media_item_fixture(%{media_filepath: new_media_item.media_filepath})
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(new_media_item.media_filepath)
+ assert File.exists?(old_media_item.media_filepath)
+ end
+
+ test "doesn't delete the old file if the new file is missing that key" do
+ new_media_item = media_item_fixture(%{media_filepath: nil})
+ old_media_item = media_item_with_attachments()
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(old_media_item.media_filepath)
+ end
+
+ test "deletes outdated subtitle files" do
+ new_media_item = media_item_with_attachments()
+ old_media_item = media_item_with_attachments()
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(get_subtitle_filepath(new_media_item, "en"))
+ refute File.exists?(get_subtitle_filepath(old_media_item, "en"))
+ end
+
+ test "keeps old subtitle files if the new file is the same" do
+ new_media_item = media_item_with_attachments()
+ old_media_item = media_item_fixture(%{subtitle_filepaths: new_media_item.subtitle_filepaths})
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(get_subtitle_filepath(new_media_item, "en"))
+ assert File.exists?(get_subtitle_filepath(old_media_item, "en"))
+ end
+
+ test "doesn't delete old subtitle files if the new file is missing that key" do
+ new_media_item = media_item_fixture(%{subtitle_filepaths: []})
+ old_media_item = media_item_with_attachments()
+
+ assert :ok = FileSyncing.delete_outdated_files(old_media_item, new_media_item)
+
+ assert File.exists?(get_subtitle_filepath(old_media_item, "en"))
+ end
+ end
+
+ describe "sync_file_presence_on_disk/1" do
+ test "removes attributes whose files are missing" do
+ media_item = media_item_fixture(%{media_filepath: "/tmp/missing_file.mp4"})
+
+ assert media_item.media_filepath
+ assert [updated_media_item] = FileSyncing.sync_file_presence_on_disk([media_item])
+ refute updated_media_item.media_filepath
+ end
+
+ test "doesn't remove attributes where the files still exist" do
+ media_item = media_item_with_attachments()
+
+ assert media_item.media_filepath
+ assert [updated_media_item] = FileSyncing.sync_file_presence_on_disk([media_item])
+ assert updated_media_item.media_filepath
+ end
+
+ test "doesn't touch other attributes if some are missing and some aren't" do
+ media_item = media_item_with_attachments()
+ File.rm(media_item.media_filepath)
+
+ assert media_item.thumbnail_filepath
+ assert media_item.media_filepath
+ assert [updated_media_item] = FileSyncing.sync_file_presence_on_disk([media_item])
+ assert updated_media_item.thumbnail_filepath
+ refute updated_media_item.media_filepath
+ end
+
+ test "removes subtitle files that are missing" do
+ media_item = media_item_fixture(%{subtitle_filepaths: [["en", "/tmp/missing_file.srt"]]})
+
+ assert get_subtitle_filepath(media_item, "en")
+ assert [updated_media_item] = FileSyncing.sync_file_presence_on_disk([media_item])
+ refute get_subtitle_filepath(updated_media_item, "en")
+ end
+
+ test "doesn't remove subtitle files that still exist" do
+ media_item = media_item_with_attachments()
+
+ assert get_subtitle_filepath(media_item, "en")
+ assert [updated_media_item] = FileSyncing.sync_file_presence_on_disk([media_item])
+ assert get_subtitle_filepath(updated_media_item, "en")
+ end
+ end
+
+ defp get_subtitle_filepath(media_item, language) do
+ Enum.reduce_while(media_item.subtitle_filepaths, nil, fn [lang, filepath], acc ->
+ if lang == language do
+ {:halt, filepath}
+ else
+ {:cont, acc}
+ end
+ end)
+ end
+end
diff --git a/test/pinchflat/media/file_syncing_worker_test.exs b/test/pinchflat/media/file_syncing_worker_test.exs
new file mode 100644
index 0000000..37ebafa
--- /dev/null
+++ b/test/pinchflat/media/file_syncing_worker_test.exs
@@ -0,0 +1,37 @@
+defmodule Pinchflat.Media.FileSyncingWorkerTest do
+ use Pinchflat.DataCase
+
+ import Pinchflat.MediaFixtures
+ import Pinchflat.SourcesFixtures
+
+ alias Pinchflat.Media.FileSyncingWorker
+
+ describe "kickoff_with_task/3" do
+ test "starts the worker" do
+ source = source_fixture()
+
+ assert [] = all_enqueued(worker: FileSyncingWorker)
+ assert {:ok, _} = FileSyncingWorker.kickoff_with_task(source)
+ assert [_] = all_enqueued(worker: FileSyncingWorker)
+ end
+
+ test "attaches a task" do
+ source = source_fixture()
+
+ assert {:ok, task} = FileSyncingWorker.kickoff_with_task(source)
+ assert task.source_id == source.id
+ end
+ end
+
+ describe "perform/1" do
+ test "syncs file presence on disk" do
+ source = source_fixture()
+ media_item = media_item_fixture(%{media_filepath: "/tmp/missing.mp4", source_id: source.id})
+
+ perform_job(FileSyncingWorker, %{"id" => source.id})
+ updated_media_item = Repo.reload!(media_item)
+
+ refute updated_media_item.media_filepath
+ end
+ end
+end
diff --git a/test/pinchflat/media_test.exs b/test/pinchflat/media_test.exs
index 13bfb8b..6becd5b 100644
--- a/test/pinchflat/media_test.exs
+++ b/test/pinchflat/media_test.exs
@@ -41,99 +41,7 @@ defmodule Pinchflat.MediaTest do
end
end
- describe "list_cullable_media_items/0" do
- test "returns media items where the source has a retention period" do
- source_one = source_fixture(%{retention_period_days: 2})
- source_two = source_fixture(%{retention_period_days: 0})
- source_three = source_fixture(%{retention_period_days: nil})
-
- _media_item =
- media_item_fixture(%{
- source_id: source_two.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- _media_item =
- media_item_fixture(%{
- source_id: source_three.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- expected_media_item =
- media_item_fixture(%{
- source_id: source_one.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- assert Media.list_cullable_media_items() == [expected_media_item]
- end
-
- test "returns media_items with a media_filepath" do
- source = source_fixture(%{retention_period_days: 2})
-
- _media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: nil,
- media_downloaded_at: now_minus(3, :days)
- })
-
- expected_media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- assert Media.list_cullable_media_items() == [expected_media_item]
- end
-
- test "returns items that have passed their retention period" do
- source = source_fixture(%{retention_period_days: 2})
-
- _media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(2, :days)
- })
-
- expected_media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- assert Media.list_cullable_media_items() == [expected_media_item]
- end
-
- test "doesn't return items that are set to prevent culling" do
- source = source_fixture(%{retention_period_days: 2})
-
- _media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days),
- prevent_culling: true
- })
-
- expected_media_item =
- media_item_fixture(%{
- source_id: source.id,
- media_filepath: "/video/#{Faker.File.file_name(:video)}",
- media_downloaded_at: now_minus(3, :days)
- })
-
- assert Media.list_cullable_media_items() == [expected_media_item]
- end
- end
-
- describe "list_redownloadable_media_items/0" do
+ describe "list_upgradeable_media_items/0" do
setup do
media_profile = media_profile_fixture(%{redownload_delay_days: 4})
source = source_fixture(%{media_profile_id: media_profile.id, inserted_at: now_minus(10, :days)})
@@ -149,7 +57,7 @@ defmodule Pinchflat.MediaTest do
media_downloaded_at: now_minus(5, :days)
})
- assert Media.list_redownloadable_media_items() == [media_item]
+ assert Media.list_upgradeable_media_items() == [media_item]
end
test "returns media items that were downloaded in past but still meet redownload delay", %{source: source} do
@@ -160,7 +68,7 @@ defmodule Pinchflat.MediaTest do
media_downloaded_at: now_minus(19, :days)
})
- assert Media.list_redownloadable_media_items() == [media_item]
+ assert Media.list_upgradeable_media_items() == [media_item]
end
test "does not return media items without a media_downloaded_at", %{source: source} do
@@ -171,7 +79,7 @@ defmodule Pinchflat.MediaTest do
media_downloaded_at: nil
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items that are set to prevent download", %{source: source} do
@@ -183,7 +91,7 @@ defmodule Pinchflat.MediaTest do
prevent_download: true
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items that have been culled", %{source: source} do
@@ -195,7 +103,7 @@ defmodule Pinchflat.MediaTest do
culled_at: now()
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items before the download delay", %{source: source} do
@@ -206,7 +114,7 @@ defmodule Pinchflat.MediaTest do
media_downloaded_at: now_minus(3, :days)
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items that have already been redownloaded", %{source: source} do
@@ -218,7 +126,7 @@ defmodule Pinchflat.MediaTest do
media_redownloaded_at: now()
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items that were first downloaded well after the uploaded_at", %{source: source} do
@@ -229,7 +137,7 @@ defmodule Pinchflat.MediaTest do
uploaded_at: now_minus(20, :days)
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items that were recently uploaded", %{source: source} do
@@ -240,7 +148,7 @@ defmodule Pinchflat.MediaTest do
uploaded_at: now_minus(2, :days)
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
test "does not return media items without a redownload delay" do
@@ -254,7 +162,7 @@ defmodule Pinchflat.MediaTest do
media_downloaded_at: now_minus(5, :days)
})
- assert Media.list_redownloadable_media_items() == []
+ assert Media.list_upgradeable_media_items() == []
end
end
@@ -447,6 +355,48 @@ defmodule Pinchflat.MediaTest do
end
end
+ describe "list_pending_media_items_for/1 when min and max durations" do
+ test "returns media items that meet the min and max duration" do
+ source = source_fixture(%{min_duration_seconds: 10, max_duration_seconds: 20})
+
+ _short_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 5})
+ normal_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+ _long_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 25})
+
+ assert Media.list_pending_media_items_for(source) == [normal_media_item]
+ end
+
+ test "does not apply a min duration if none is specified" do
+ source = source_fixture(%{min_duration_seconds: nil, max_duration_seconds: 20})
+
+ short_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 5})
+ normal_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+ _long_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 25})
+
+ assert Media.list_pending_media_items_for(source) == [short_media_item, normal_media_item]
+ end
+
+ test "does not apply a max duration if none is specified" do
+ source = source_fixture(%{min_duration_seconds: 10, max_duration_seconds: nil})
+
+ _short_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 5})
+ normal_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+ long_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 25})
+
+ assert Media.list_pending_media_items_for(source) == [normal_media_item, long_media_item]
+ end
+
+ test "does not apply a min or max duration if none are specified" do
+ source = source_fixture(%{min_duration_seconds: nil, max_duration_seconds: nil})
+
+ short_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 5})
+ normal_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+ long_media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 25})
+
+ assert Media.list_pending_media_items_for(source) == [short_media_item, normal_media_item, long_media_item]
+ end
+ end
+
describe "list_pending_media_items_for/1 when testing download prevention" do
test "returns only media items that are not prevented from downloading" do
source = source_fixture()
@@ -491,6 +441,13 @@ defmodule Pinchflat.MediaTest do
assert Media.pending_download?(media_item)
end
+ test "returns true if the cutoff date is equal to the upload date" do
+ source = source_fixture(%{download_cutoff_date: now_minus(2, :days)})
+ media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, uploaded_at: now_minus(2, :days)})
+
+ assert Media.pending_download?(media_item)
+ end
+
test "returns false if there is a cutoff date after the media's upload date" do
source = source_fixture(%{download_cutoff_date: now_minus(1, :day)})
media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, uploaded_at: now_minus(2, :days)})
@@ -526,6 +483,34 @@ defmodule Pinchflat.MediaTest do
assert Media.pending_download?(media_item)
end
+ test "returns true if the duration is between the min and max" do
+ source = source_fixture(%{min_duration_seconds: 10, max_duration_seconds: 20})
+ media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+
+ assert Media.pending_download?(media_item)
+ end
+
+ test "returns false if the duration is below the min" do
+ source = source_fixture(%{min_duration_seconds: 10, max_duration_seconds: 20})
+ media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 5})
+
+ refute Media.pending_download?(media_item)
+ end
+
+ test "returns false if the duration is above the max" do
+ source = source_fixture(%{min_duration_seconds: 10, max_duration_seconds: 20})
+ media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 25})
+
+ refute Media.pending_download?(media_item)
+ end
+
+ test "returns true if there is no min or max duration" do
+ source = source_fixture(%{min_duration_seconds: nil, max_duration_seconds: nil})
+ media_item = media_item_fixture(%{source_id: source.id, media_filepath: nil, duration_seconds: 15})
+
+ assert Media.pending_download?(media_item)
+ end
+
test "returns true if the media item is not prevented from downloading" do
media_item = media_item_fixture(%{media_filepath: nil, prevent_download: false})
@@ -689,7 +674,24 @@ defmodule Pinchflat.MediaTest do
assert {:ok, %MediaItem{} = media_item_2} = Media.create_media_item_from_backend_attrs(source, different_attrs)
assert media_item_1.id == media_item_2.id
- assert media_item_2.title == different_attrs.title
+ assert Repo.reload(media_item_2).title == different_attrs.title
+ end
+
+ test "doesn't update fields like playlist_index" do
+ source = source_fixture()
+
+ media_attrs =
+ media_attributes_return_fixture()
+ |> Phoenix.json_library().decode!()
+ |> Map.put("playlist_index", 1)
+ |> YtDlpMedia.response_to_struct()
+
+ different_attrs = %YtDlpMedia{media_attrs | playlist_index: 9999}
+
+ assert {:ok, %MediaItem{} = _media_item_1} = Media.create_media_item_from_backend_attrs(source, media_attrs)
+ assert {:ok, %MediaItem{} = media_item_2} = Media.create_media_item_from_backend_attrs(source, different_attrs)
+
+ assert Repo.reload(media_item_2).playlist_index == media_attrs.playlist_index
end
end
@@ -710,6 +712,21 @@ defmodule Pinchflat.MediaTest do
assert media_item.media_filepath == update_attrs.media_filepath
end
+ test "updating strips playlist_index from the provided attrs" do
+ media_item = media_item_fixture(playlist_index: 5)
+
+ update_attrs = %{
+ media_id: Faker.String.base64(12),
+ title: Faker.Commerce.product_name(),
+ media_filepath: "/video/#{Faker.File.file_name(:video)}",
+ source_id: source_fixture().id,
+ playlist_index: 1
+ }
+
+ assert {:ok, %MediaItem{} = media_item} = Media.update_media_item(media_item, update_attrs)
+ assert media_item.playlist_index == 5
+ end
+
test "updating with invalid data returns error changeset" do
media_item = media_item_fixture()
assert {:error, %Ecto.Changeset{}} = Media.update_media_item(media_item, @invalid_attrs)
@@ -740,8 +757,8 @@ defmodule Pinchflat.MediaTest do
end
test "does delete the media item's metadata files" do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
- media_item = Repo.preload(media_item_with_attachments(), :metadata)
+ stub(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""} end)
+ media_item = Repo.preload(media_item_with_attachments(), [:metadata, :source])
update_attrs = %{
metadata: %{
@@ -759,7 +776,7 @@ defmodule Pinchflat.MediaTest do
describe "delete_media_item/2 when testing file deletion" do
setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
:ok
end
@@ -772,8 +789,8 @@ defmodule Pinchflat.MediaTest do
end
test "deletes the media item's metadata files" do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
- media_item = Repo.preload(media_item_with_attachments(), :metadata)
+ stub(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""} end)
+ media_item = Repo.preload(media_item_with_attachments(), [:metadata, :source])
update_attrs = %{
metadata: %{
@@ -820,7 +837,7 @@ defmodule Pinchflat.MediaTest do
expect(UserScriptRunnerMock, :run, fn :media_deleted, data ->
assert data.id == media_item.id
- :ok
+ {:ok, "", 0}
end)
assert {:ok, _} = Media.delete_media_item(media_item, delete_files: true)
@@ -829,7 +846,7 @@ defmodule Pinchflat.MediaTest do
describe "delete_media_files/2" do
setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
:ok
end
@@ -858,8 +875,8 @@ defmodule Pinchflat.MediaTest do
end
test "does not delete the media item's metadata files" do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
- media_item = Repo.preload(media_item_with_attachments(), :metadata)
+ stub(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""} end)
+ media_item = Repo.preload(media_item_with_attachments(), [:metadata, :source])
update_attrs = %{
metadata: %{
@@ -892,7 +909,7 @@ defmodule Pinchflat.MediaTest do
expect(UserScriptRunnerMock, :run, fn :media_deleted, data ->
assert data.id == media_item.id
- :ok
+ {:ok, "", 0}
end)
assert {:ok, _} = Media.delete_media_files(media_item)
@@ -904,6 +921,14 @@ defmodule Pinchflat.MediaTest do
media_item = media_item_fixture()
assert %Ecto.Changeset{} = Media.change_media_item(media_item)
end
+
+ test "validates the title doesn't start with 'youtube video #'" do
+ # This is to account for youtube restricting indexing. See issue #549 for more
+ media_item = media_item_fixture()
+
+ assert %Ecto.Changeset{valid?: false} = Media.change_media_item(media_item, %{title: "youtube video #123"})
+ assert %Ecto.Changeset{valid?: true} = Media.change_media_item(media_item, %{title: "any other title"})
+ end
end
describe "change_media_item/1 when testing upload_date_index and source is a channel" do
diff --git a/test/pinchflat/metadata/metadata_file_helpers_test.exs b/test/pinchflat/metadata/metadata_file_helpers_test.exs
index 36db3cd..bacb861 100644
--- a/test/pinchflat/metadata/metadata_file_helpers_test.exs
+++ b/test/pinchflat/metadata/metadata_file_helpers_test.exs
@@ -2,15 +2,26 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
use Pinchflat.DataCase
import Pinchflat.MediaFixtures
+ import Pinchflat.SourcesFixtures
alias Pinchflat.Metadata.MetadataFileHelpers, as: Helpers
setup do
- media_item = media_item_fixture()
+ media_item = Repo.preload(media_item_fixture(), :source)
{:ok, %{media_item: media_item}}
end
+ describe "metadata_directory_for/1" do
+ test "returns the metadata directory for the given record", %{media_item: media_item} do
+ base_metadata_directory = Application.get_env(:pinchflat, :metadata_directory)
+
+ metadata_directory = Helpers.metadata_directory_for(media_item)
+
+ assert metadata_directory == Path.join([base_metadata_directory, "media_items", "#{media_item.id}"])
+ end
+ end
+
describe "compress_and_store_metadata_for/2" do
test "returns the filepath", %{media_item: media_item} do
metadata_map = %{"foo" => "bar"}
@@ -51,7 +62,7 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
describe "download_and_store_thumbnail_for/2" do
test "returns the filepath", %{media_item: media_item} do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, ""} end)
+ stub(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, _addl -> {:ok, ""} end)
filepath = Helpers.download_and_store_thumbnail_for(media_item)
@@ -59,7 +70,7 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
end
test "calls yt-dlp with the expected options", %{media_item: media_item} do
- expect(YtDlpRunnerMock, :run, fn url, opts, ot ->
+ expect(YtDlpRunnerMock, :run, fn url, :download_thumbnail, opts, ot, _addl ->
assert url == media_item.original_url
assert ot == "after_move:%()j"
@@ -78,7 +89,7 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
end
test "returns nil if yt-dlp fails", %{media_item: media_item} do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:error, "error"} end)
+ stub(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, _addl -> {:error, "error"} end)
filepath = Helpers.download_and_store_thumbnail_for(media_item)
@@ -86,6 +97,44 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
end
end
+ describe "download_and_store_thumbnail_for/2 when testing cookie usage" do
+ test "sets use_cookies if the source uses cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+ media_item = Repo.preload(media_item_fixture(%{source_id: source.id}), :source)
+
+ Helpers.download_and_store_thumbnail_for(media_item)
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ media_item = Repo.preload(media_item_fixture(%{source_id: source.id}), :source)
+
+ Helpers.download_and_store_thumbnail_for(media_item)
+ end
+
+ test "does not set use_cookies if the source does not use cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, ""}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :disabled})
+ media_item = Repo.preload(media_item_fixture(%{source_id: source.id}), :source)
+
+ Helpers.download_and_store_thumbnail_for(media_item)
+ end
+ end
+
describe "parse_upload_date/1" do
test "returns a datetime from the given metadata upload date" do
upload_date = "20210101"
@@ -142,13 +191,20 @@ defmodule Pinchflat.Metadata.MetadataFileHelpersTest do
end
end
- describe "metadata_directory_for/1" do
- test "returns the metadata directory for the given record", %{media_item: media_item} do
- base_metadata_directory = Application.get_env(:pinchflat, :metadata_directory)
+ describe "season_and_episode_from_media_filepath/1" do
+ test "returns a season and episode if one can be determined" do
+ assert {:ok, {"1", "2"}} = Helpers.season_and_episode_from_media_filepath("/foo/s1e2 - test.mp4")
+ assert {:ok, {"1", "2"}} = Helpers.season_and_episode_from_media_filepath("/foo/S1E2 - test.mp4")
+ assert {:ok, {"001", "002"}} = Helpers.season_and_episode_from_media_filepath("/foo/s001e002 - test.mp4")
+ assert {:ok, {"1", "2"}} = Helpers.season_and_episode_from_media_filepath("/foo/s1e2bar - test.mp4")
+ assert {:ok, {"1", "2"}} = Helpers.season_and_episode_from_media_filepath("/foo/bar s1e2 - test.mp4")
+ end
- metadata_directory = Helpers.metadata_directory_for(media_item)
-
- assert metadata_directory == Path.join([base_metadata_directory, "media_items", "#{media_item.id}"])
+ test "returns an error if a season and episode can't be determined" do
+ assert {:error, :indeterminable} = Helpers.season_and_episode_from_media_filepath("/foo/test.mp4")
+ assert {:error, :indeterminable} = Helpers.season_and_episode_from_media_filepath("/foo/s1 - test.mp4")
+ assert {:error, :indeterminable} = Helpers.season_and_episode_from_media_filepath("/foo/s1e - test.mp4")
+ assert {:error, :indeterminable} = Helpers.season_and_episode_from_media_filepath("/foo/s1etest.mp4")
end
end
end
diff --git a/test/pinchflat/metadata/metadata_parser_test.exs b/test/pinchflat/metadata/metadata_parser_test.exs
index 48015b1..c3eca80 100644
--- a/test/pinchflat/metadata/metadata_parser_test.exs
+++ b/test/pinchflat/metadata/metadata_parser_test.exs
@@ -43,11 +43,12 @@ defmodule Pinchflat.Metadata.MetadataParserTest do
test "it extracts the livestream flag", %{metadata: metadata} do
result = Parser.parse_for_media_item(metadata)
- assert result.livestream == metadata["was_live"]
+ assert metadata["live_status"] == "not_live"
+ refute result.livestream
end
test "the livestream flag defaults to false", %{metadata: metadata} do
- metadata = Map.put(metadata, "was_live", nil)
+ metadata = Map.put(metadata, "live_status", nil)
result = Parser.parse_for_media_item(metadata)
diff --git a/test/pinchflat/metadata/nfo_builder_test.exs b/test/pinchflat/metadata/nfo_builder_test.exs
index c8b032a..47217c7 100644
--- a/test/pinchflat/metadata/nfo_builder_test.exs
+++ b/test/pinchflat/metadata/nfo_builder_test.exs
@@ -5,7 +5,7 @@ defmodule Pinchflat.Metadata.NfoBuilderTest do
alias Pinchflat.Utils.FilesystemUtils
setup do
- filepath = FilesystemUtils.generate_metadata_tmpfile(:json)
+ filepath = FilesystemUtils.generate_metadata_tmpfile(:nfo)
on_exit(fn -> File.rm!(filepath) end)
@@ -45,6 +45,43 @@ defmodule Pinchflat.Metadata.NfoBuilderTest do
assert String.contains?(nfo, "hello' & <world>")
end
+
+ test "uses the season and episode number from the filepath if it can be determined" do
+ metadata = %{
+ "title" => "title",
+ "uploader" => "uploader",
+ "id" => "id",
+ "description" => "description",
+ "upload_date" => "20210101"
+ }
+
+ tmpfile_directory = Application.get_env(:pinchflat, :tmpfile_directory)
+ filepath = Path.join([tmpfile_directory, "foo/s0123e456.nfo"])
+
+ result = NfoBuilder.build_and_store_for_media_item(filepath, metadata)
+ nfo = File.read!(result)
+
+ assert String.contains?(nfo, "0123 ")
+ assert String.contains?(nfo, "456 ")
+
+ File.rm!(filepath)
+ end
+
+ test "uses the upload date if the season and episode number can't be determined", %{filepath: filepath} do
+ metadata = %{
+ "title" => "title",
+ "uploader" => "uploader",
+ "id" => "id",
+ "description" => "description",
+ "upload_date" => "20210101"
+ }
+
+ result = NfoBuilder.build_and_store_for_media_item(filepath, metadata)
+ nfo = File.read!(result)
+
+ assert String.contains?(nfo, "2021 ")
+ assert String.contains?(nfo, "0101 ")
+ end
end
describe "build_and_store_for_source/2" do
diff --git a/test/pinchflat/metadata/source_image_parser_test.exs b/test/pinchflat/metadata/source_image_parser_test.exs
index 8ec3c6c..3f815ae 100644
--- a/test/pinchflat/metadata/source_image_parser_test.exs
+++ b/test/pinchflat/metadata/source_image_parser_test.exs
@@ -75,4 +75,51 @@ defmodule Pinchflat.Metadata.SourceImageParserTest do
assert SourceImageParser.store_source_images(@base_dir, metadata) == %{}
end
end
+
+ describe "store_source_images/2 when testing fallbacks" do
+ test "uses the entries list for a fallback poster if needed" do
+ metadata = %{
+ "thumbnails" => [],
+ "entries" => [
+ %{
+ "thumbnails" => [%{"filepath" => "/app/test/support/files/channel_photos/a.0.jpg"}]
+ }
+ ]
+ }
+
+ expected = %{
+ poster_filepath: "#{@base_dir}/poster.jpg"
+ }
+
+ assert SourceImageParser.store_source_images(@base_dir, metadata) == expected
+ end
+
+ test "doesn't blow up if the entries list doesn't have any suitable thumbnails" do
+ metadata = %{
+ "thumbnails" => [],
+ "entries" => [
+ %{"thumbnails" => [%{"id" => "1"}]}
+ ]
+ }
+
+ assert SourceImageParser.store_source_images(@base_dir, metadata) == %{}
+ end
+
+ test "doesn't use the entries list if it's empty" do
+ metadata = %{
+ "thumbnails" => [],
+ "entries" => []
+ }
+
+ assert SourceImageParser.store_source_images(@base_dir, metadata) == %{}
+ end
+
+ test "doesn't use the entries list if it's not present" do
+ metadata = %{
+ "thumbnails" => []
+ }
+
+ assert SourceImageParser.store_source_images(@base_dir, metadata) == %{}
+ end
+ end
end
diff --git a/test/pinchflat/metadata/source_metadata_storage_worker_test.exs b/test/pinchflat/metadata/source_metadata_storage_worker_test.exs
index 503d0f3..4965964 100644
--- a/test/pinchflat/metadata/source_metadata_storage_worker_test.exs
+++ b/test/pinchflat/metadata/source_metadata_storage_worker_test.exs
@@ -8,9 +8,6 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
alias Pinchflat.Metadata.MetadataFileHelpers
alias Pinchflat.Metadata.SourceMetadataStorageWorker
- @source_details_ot "%(.{channel,channel_id,playlist_id,playlist_title,filename})j"
- @metadata_ot "playlist:%()j"
-
describe "kickoff_with_task/1" do
test "enqueues a new worker for the source" do
source = source_fixture()
@@ -32,8 +29,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
describe "perform/1" do
test "won't call itself in an infinite loop" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, "{}"}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, "{}"}
end)
source = source_fixture()
@@ -51,8 +48,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
describe "perform/1 when testing attribute updates" do
test "the source description is saved" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, render_metadata(:channel_source_metadata)}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, render_metadata(:channel_source_metadata)}
end)
source = source_fixture(%{description: nil})
@@ -68,8 +65,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
describe "perform/1 when testing metadata storage" do
test "sets metadata location for source" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, "{}"}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, "{}"}
end)
source = Repo.preload(source_fixture(), :metadata)
@@ -87,8 +84,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
file_contents = Phoenix.json_library().encode!(%{"title" => "test"})
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, file_contents}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, file_contents}
end)
perform_job(SourceMetadataStorageWorker, %{id: source.id})
@@ -100,8 +97,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "sets metadata image location for source" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, render_metadata(:channel_source_metadata)}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, render_metadata(:channel_source_metadata)}
end)
source = source_fixture()
@@ -118,8 +115,8 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "stores metadata images for source" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot -> {:ok, source_details_return_fixture()}
- _url, _opts, ot when ot == @metadata_ot -> {:ok, render_metadata(:channel_source_metadata)}
+ _url, :get_source_details, _opts, _ot, _addl -> {:ok, source_details_return_fixture()}
+ _url, :get_source_metadata, _opts, _ot, _addl -> {:ok, render_metadata(:channel_source_metadata)}
end)
source = source_fixture()
@@ -138,12 +135,14 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
describe "perform/1 when testing source image downloading" do
test "downloads and stores source images" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "Season 1", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, opts, _ot, _addl ->
+ assert {:convert_thumbnails, "jpg"} in opts
+
{:ok, render_metadata(:channel_source_metadata)}
end)
@@ -164,14 +163,50 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
Sources.delete_source(source, delete_files: true)
end
+ test "calls one set of yt-dlp metadata opts for channels" do
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :get_source_details, _opts, _ot, _addl ->
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, opts, _ot, _addl ->
+ assert {:playlist_items, 0} in opts
+ assert :write_all_thumbnails in opts
+
+ {:ok, render_metadata(:channel_source_metadata)}
+ end)
+
+ profile = media_profile_fixture(%{download_source_images: true})
+ source = source_fixture(media_profile_id: profile.id, collection_type: :channel)
+
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
+ test "calls another set of yt-dlp metadata opts for playlists" do
+ stub(YtDlpRunnerMock, :run, fn
+ _url, :get_source_details, _opts, _ot, _addl ->
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, opts, _ot, _addl ->
+ assert {:playlist_items, 1} in opts
+ assert :write_thumbnail in opts
+
+ {:ok, render_metadata(:channel_source_metadata)}
+ end)
+
+ profile = media_profile_fixture(%{download_source_images: true})
+ source = source_fixture(media_profile_id: profile.id, collection_type: :playlist)
+
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
test "does not store source images if the profile is not set to" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "Season 1", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, render_metadata(:channel_source_metadata)}
end)
@@ -188,12 +223,12 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "does not store source images if the series directory cannot be determined" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "foo", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, render_metadata(:channel_source_metadata)}
end)
@@ -207,17 +242,65 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
refute source.poster_filepath
refute source.banner_filepath
end
+
+ test "sets use_cookies if the source uses cookies" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, _addl ->
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, render_metadata(:channel_source_metadata)}
+ end)
+
+ profile = media_profile_fixture(%{download_source_images: true})
+ source = source_fixture(media_profile_id: profile.id, cookie_behaviour: :all_operations)
+
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, _addl ->
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, render_metadata(:channel_source_metadata)}
+ end)
+
+ profile = media_profile_fixture(%{download_source_images: true})
+ source = source_fixture(media_profile_id: profile.id, cookie_behaviour: :when_needed)
+
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
+ test "does not set use_cookies if the source does not use cookies" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, _addl ->
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, render_metadata(:channel_source_metadata)}
+ end)
+
+ profile = media_profile_fixture(%{download_source_images: true})
+ source = source_fixture(media_profile_id: profile.id, cookie_behaviour: :disabled)
+
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
end
describe "perform/1 when determining the series_directory" do
test "sets the series directory based on the returned media filepath" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "Season 1", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, "{}"}
end)
@@ -230,12 +313,12 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "does not set the series directory if it cannot be determined" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "foo", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, "{}"}
end)
@@ -245,17 +328,59 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
refute source.series_directory
end
+
+ test "sets use_cookies if the source is set to use cookies" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, addl ->
+ assert {:use_cookies, true} in addl
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, _addl ->
+ {:ok, "{}"}
+ end)
+
+ source = source_fixture(%{series_directory: nil, cookie_behaviour: :all_operations})
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, _addl ->
+ {:ok, "{}"}
+ end)
+
+ source = source_fixture(%{series_directory: nil, cookie_behaviour: :when_needed})
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
+
+ test "does not set use_cookies if the source is not set to use cookies" do
+ expect(YtDlpRunnerMock, :run, 2, fn
+ _url, :get_source_details, _opts, _ot, addl ->
+ assert {:use_cookies, false} in addl
+ {:ok, source_details_return_fixture()}
+
+ _url, :get_source_metadata, _opts, _ot, _addl ->
+ {:ok, "{}"}
+ end)
+
+ source = source_fixture(%{series_directory: nil, cookie_behaviour: :disabled})
+ perform_job(SourceMetadataStorageWorker, %{id: source.id})
+ end
end
describe "perform/1 when storing the series NFO" do
test "stores the NFO if specified" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "Season 1", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, "{}"}
end)
@@ -273,12 +398,12 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "does not store the NFO if not specified" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "Season 1", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, "{}"}
end)
@@ -292,12 +417,12 @@ defmodule Pinchflat.Metadata.SourceMetadataStorageWorkerTest do
test "does not store the NFO if the series directory cannot be determined" do
stub(YtDlpRunnerMock, :run, fn
- _url, _opts, ot when ot == @source_details_ot ->
+ _url, :get_source_details, _opts, _ot, _addl ->
filename = Path.join([Application.get_env(:pinchflat, :media_directory), "foo", "bar.mp4"])
{:ok, source_details_return_fixture(%{filename: filename})}
- _url, _opts, ot when ot == @metadata_ot ->
+ _url, :get_source_metadata, _opts, _ot, _addl ->
{:ok, "{}"}
end)
diff --git a/test/pinchflat/podcasts/opml_feed_builder_test.exs b/test/pinchflat/podcasts/opml_feed_builder_test.exs
new file mode 100644
index 0000000..460de76
--- /dev/null
+++ b/test/pinchflat/podcasts/opml_feed_builder_test.exs
@@ -0,0 +1,34 @@
+defmodule Pinchflat.Podcasts.OpmlFeedBuilderTest do
+ use Pinchflat.DataCase
+
+ import Pinchflat.SourcesFixtures
+
+ alias Pinchflat.Podcasts.OpmlFeedBuilder
+
+ setup do
+ source = source_fixture()
+
+ {:ok, source: source}
+ end
+
+ describe "build/2" do
+ test "returns an XML document", %{source: source} do
+ res = OpmlFeedBuilder.build("http://example.com", [source])
+
+ assert String.contains?(res, ~s(<?xml version="1.0" encoding="UTF-8"?>))
+ end
+
+ test "escapes illegal characters" do
+ source = source_fixture(%{custom_name: "A & B"})
+ res = OpmlFeedBuilder.build("http://example.com", [source])
+
+ assert String.contains?(res, ~s(A &amp; B))
+ end
+
+ test "build podcast link with URL base", %{source: source} do
+ res = OpmlFeedBuilder.build("http://example.com", [source])
+
+ assert String.contains?(res, ~s(http://example.com/sources/#{source.uuid}/feed.xml))
+ end
+ end
+end
diff --git a/test/pinchflat/podcasts/podcast_helpers_test.exs b/test/pinchflat/podcasts/podcast_helpers_test.exs
index 4499c73..ae88006 100644
--- a/test/pinchflat/podcasts/podcast_helpers_test.exs
+++ b/test/pinchflat/podcasts/podcast_helpers_test.exs
@@ -6,6 +6,16 @@ defmodule Pinchflat.Podcasts.PodcastHelpersTest do
alias Pinchflat.Podcasts.PodcastHelpers
+ describe "opml_sources" do
+ test "returns sources not marked for deletion" do
+ source = source_fixture()
+ source_fixture(%{marked_for_deletion_at: DateTime.utc_now()})
+ assert [found_source] = PodcastHelpers.opml_sources()
+ assert found_source.custom_name == source.custom_name
+ assert found_source.uuid == source.uuid
+ end
+ end
+
describe "persisted_media_items_for/2" do
test "returns media items with files that exist on-disk" do
source = source_fixture()
diff --git a/test/pinchflat/profiles/media_profile_deletion_worker_test.exs b/test/pinchflat/profiles/media_profile_deletion_worker_test.exs
new file mode 100644
index 0000000..b91aace
--- /dev/null
+++ b/test/pinchflat/profiles/media_profile_deletion_worker_test.exs
@@ -0,0 +1,57 @@
+defmodule Pinchflat.Profiles.MediaProfileDeletionWorkerTest do
+ use Pinchflat.DataCase
+
+ import Pinchflat.MediaFixtures
+ import Pinchflat.SourcesFixtures
+ import Pinchflat.ProfilesFixtures
+
+ alias Pinchflat.Profiles.MediaProfileDeletionWorker
+
+ setup do
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
+
+ {:ok, %{profile: media_profile_fixture()}}
+ end
+
+ describe "kickoff/3" do
+ test "starts the worker", %{profile: profile} do
+ assert [] = all_enqueued(worker: MediaProfileDeletionWorker)
+ assert {:ok, _} = MediaProfileDeletionWorker.kickoff(profile)
+ assert [_] = all_enqueued(worker: MediaProfileDeletionWorker)
+ end
+
+ test "can be called with additional job arguments", %{profile: profile} do
+ job_args = %{"delete_files" => true}
+
+ assert {:ok, _} = MediaProfileDeletionWorker.kickoff(profile, job_args)
+
+ assert_enqueued(worker: MediaProfileDeletionWorker, args: %{"id" => profile.id, "delete_files" => true})
+ end
+ end
+
+ describe "perform/1" do
+ test "deletes the profile, sources, and media but leaves the files", %{profile: profile} do
+ source = source_fixture(%{media_profile_id: profile.id})
+ media_item = media_item_with_attachments(%{source_id: source.id})
+
+ perform_job(MediaProfileDeletionWorker, %{"id" => profile.id})
+
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(profile) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
+ assert File.exists?(media_item.media_filepath)
+ end
+
+ test "deletes the profile, sources, and media and files if specified", %{profile: profile} do
+ source = source_fixture(%{media_profile_id: profile.id})
+ media_item = media_item_with_attachments(%{source_id: source.id})
+
+ perform_job(MediaProfileDeletionWorker, %{"id" => profile.id, "delete_files" => true})
+
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(profile) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
+ refute File.exists?(media_item.media_filepath)
+ end
+ end
+end
diff --git a/test/pinchflat/profiles_test.exs b/test/pinchflat/profiles_test.exs
index 4cf6437..6f78e7f 100644
--- a/test/pinchflat/profiles_test.exs
+++ b/test/pinchflat/profiles_test.exs
@@ -113,7 +113,7 @@ defmodule Pinchflat.ProfilesTest do
describe "delete_media_profile/2 when deleting files" do
setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
:ok
end
diff --git a/test/pinchflat/settings_test.exs b/test/pinchflat/settings_test.exs
index 944eaf1..774e274 100644
--- a/test/pinchflat/settings_test.exs
+++ b/test/pinchflat/settings_test.exs
@@ -77,5 +77,20 @@ defmodule Pinchflat.SettingsTest do
assert %Ecto.Changeset{} = Settings.change_setting(setting, %{onboarding: true})
end
+
+ test "ensures the extractor sleep interval is positive" do
+ setting = Settings.record()
+
+ assert %Ecto.Changeset{valid?: true} = Settings.change_setting(setting, %{extractor_sleep_interval_seconds: 1})
+ assert %Ecto.Changeset{valid?: true} = Settings.change_setting(setting, %{extractor_sleep_interval_seconds: 0})
+ assert %Ecto.Changeset{valid?: false} = Settings.change_setting(setting, %{extractor_sleep_interval_seconds: -1})
+ end
+
+ test "allows you to reset the extractor sleep interval" do
+ setting = Settings.record()
+ assert {:ok, setting} = Settings.update_setting(setting, %{extractor_sleep_interval_seconds: 1})
+
+ assert %Ecto.Changeset{valid?: true} = Settings.change_setting(setting, %{extractor_sleep_interval_seconds: 0})
+ end
end
end
diff --git a/test/pinchflat/slow_indexing/file_follower_server_test.exs b/test/pinchflat/slow_indexing/file_follower_server_test.exs
index 4e9e33a..66ac520 100644
--- a/test/pinchflat/slow_indexing/file_follower_server_test.exs
+++ b/test/pinchflat/slow_indexing/file_follower_server_test.exs
@@ -1,7 +1,7 @@
defmodule Pinchflat.SlowIndexing.FileFollowerServerTest do
use Pinchflat.DataCase
- alias alias Pinchflat.Utils.FilesystemUtils
+ alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.SlowIndexing.FileFollowerServer
setup do
diff --git a/test/pinchflat/slow_indexing/media_collection_indexing_worker_test.exs b/test/pinchflat/slow_indexing/media_collection_indexing_worker_test.exs
index 321c8c9..c8b5360 100644
--- a/test/pinchflat/slow_indexing/media_collection_indexing_worker_test.exs
+++ b/test/pinchflat/slow_indexing/media_collection_indexing_worker_test.exs
@@ -50,45 +50,69 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
describe "perform/1" do
setup do
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts -> {:ok, ""} end)
+
stub(AppriseRunnerMock, :run, fn _, _ -> {:ok, ""} end)
:ok
end
- test "it indexes the source if it should be indexed" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ test "indexes the source if it should be indexed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, ""}
+ end)
source = source_fixture(index_frequency_minutes: 10)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
end
- test "it indexes the source no matter what if the source has never been indexed before" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ test "indexes the source no matter what if the source has never been indexed before" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, ""}
+ end)
source = source_fixture(index_frequency_minutes: 0, last_indexed_at: nil)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
end
- test "it indexes the source no matter what if the 'force' arg is passed" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ test "indexes the source no matter what if the 'force' arg is passed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, ""}
+ end)
source = source_fixture(index_frequency_minutes: 0, last_indexed_at: DateTime.utc_now())
perform_job(MediaCollectionIndexingWorker, %{id: source.id, force: true})
end
- test "it does not do any indexing if the source has been indexed and shouldn't be rescheduled" do
- expect(YtDlpRunnerMock, :run, 0, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ test "doesn't use a download archive if the index has been forced" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ refute :break_on_existing in opts
+ refute Keyword.has_key?(opts, :download_archive)
+
+ {:ok, ""}
+ end)
+
+ source =
+ source_fixture(collection_type: :channel, index_frequency_minutes: 0, last_indexed_at: DateTime.utc_now())
+
+ perform_job(MediaCollectionIndexingWorker, %{id: source.id, force: true})
+ end
+
+ test "does not do any indexing if the source has been indexed and shouldn't be rescheduled" do
+ expect(YtDlpRunnerMock, :run, 0, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, ""}
+ end)
source = source_fixture(index_frequency_minutes: -1, last_indexed_at: DateTime.utc_now())
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
end
- test "it does not reschedule if the source shouldn't be indexed" do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ test "does not reschedule if the source shouldn't be indexed" do
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts -> {:ok, ""} end)
source = source_fixture(index_frequency_minutes: -1)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
@@ -96,8 +120,8 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
refute_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
end
- test "it kicks off a download job for each pending media item" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ test "kicks off a download job for each pending media item" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture()}
end)
@@ -107,8 +131,8 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
assert length(all_enqueued(worker: MediaDownloadWorker)) == 3
end
- test "it starts a job for any pending media item even if it's from another run" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ test "starts a job for any pending media item even if it's from another run" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture()}
end)
@@ -119,8 +143,8 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
assert length(all_enqueued(worker: MediaDownloadWorker)) == 4
end
- test "it does not kick off a job for media items that could not be saved" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ test "does not kick off a job for media items that could not be saved" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture()}
end)
@@ -132,9 +156,7 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
assert length(all_enqueued(worker: MediaDownloadWorker))
end
- test "it reschedules the job based on the index frequency" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
-
+ test "reschedules the job based on the index frequency" do
source = source_fixture(index_frequency_minutes: 10)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
@@ -145,9 +167,7 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
)
end
- test "it creates a task for the rescheduled job" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
-
+ test "creates a task for the rescheduled job" do
source = source_fixture(index_frequency_minutes: 10)
task_count_fetcher = fn ->
@@ -159,9 +179,7 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
end)
end
- test "it creates a future task for fast indexing if appropriate" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
-
+ test "creates a future task for fast indexing if appropriate" do
source = source_fixture(index_frequency_minutes: 10, fast_index: true)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
@@ -172,9 +190,7 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
)
end
- test "it deletes existing fast indexing tasks if a new one is created" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
-
+ test "deletes existing fast indexing tasks if a new one is created" do
source = source_fixture(index_frequency_minutes: 10, fast_index: true)
{:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
task = task_fixture(source_id: source.id, job_id: job.id)
@@ -184,17 +200,17 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
- test "it does not create a task for fast indexing otherwise" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
-
+ test "does not create a task for fast indexing otherwise" do
source = source_fixture(index_frequency_minutes: 10, fast_index: false)
perform_job(MediaCollectionIndexingWorker, %{id: source.id})
refute_enqueued(worker: FastIndexingWorker)
end
- test "it creates the basic media_item records" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, source_attributes_return_fixture()} end)
+ test "creates the basic media_item records" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, source_attributes_return_fixture()}
+ end)
source = source_fixture(index_frequency_minutes: 10)
@@ -225,7 +241,7 @@ defmodule Pinchflat.SlowIndexing.MediaCollectionIndexingWorkerTest do
test "sends a notification if new media was found" do
source = source_fixture()
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture()}
end)
diff --git a/test/pinchflat/slow_indexing/slow_indexing_helpers_test.exs b/test/pinchflat/slow_indexing/slow_indexing_helpers_test.exs
index bdd721f..d37721a 100644
--- a/test/pinchflat/slow_indexing/slow_indexing_helpers_test.exs
+++ b/test/pinchflat/slow_indexing/slow_indexing_helpers_test.exs
@@ -14,8 +14,12 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
alias Pinchflat.SlowIndexing.SlowIndexingHelpers
alias Pinchflat.SlowIndexing.MediaCollectionIndexingWorker
+ setup do
+ {:ok, %{source: source_fixture()}}
+ end
+
describe "kickoff_indexing_task/3" do
- test "it schedules a job" do
+ test "schedules a job" do
source = source_fixture(index_frequency_minutes: 1)
assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
@@ -23,7 +27,47 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
end
- test "it creates and attaches a task" do
+ test "schedules a job for the future based on when the source was last indexed" do
+ source = source_fixture(index_frequency_minutes: 30, last_indexed_at: now_minus(5, :minutes))
+
+ assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
+
+ [job] = all_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+
+ assert_in_delta DateTime.diff(job.scheduled_at, DateTime.utc_now(), :minute), 25, 1
+ end
+
+ test "schedules a job immediately if the source was indexed far in the past" do
+ source = source_fixture(index_frequency_minutes: 30, last_indexed_at: now_minus(60, :minutes))
+
+ assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
+
+ [job] = all_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+
+ assert_in_delta DateTime.diff(job.scheduled_at, DateTime.utc_now(), :second), 0, 1
+ end
+
+ test "schedules a job immediately if the source has never been indexed" do
+ source = source_fixture(index_frequency_minutes: 30, last_indexed_at: nil)
+
+ assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
+
+ [job] = all_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+
+ assert_in_delta DateTime.diff(job.scheduled_at, DateTime.utc_now(), :second), 0, 1
+ end
+
+ test "schedules a job immediately if the user is forcing an index" do
+ source = source_fixture(index_frequency_minutes: 30, last_indexed_at: now_minus(5, :minutes))
+
+ assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source, %{force: true})
+
+ [job] = all_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+
+ assert_in_delta DateTime.diff(job.scheduled_at, DateTime.utc_now(), :second), 0, 1
+ end
+
+ test "creates and attaches a task" do
source = source_fixture(index_frequency_minutes: 1)
assert {:ok, %Task{} = task} = SlowIndexingHelpers.kickoff_indexing_task(source)
@@ -31,7 +75,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert task.source_id == source.id
end
- test "it deletes any pending media collection tasks for the source" do
+ test "deletes any pending media collection tasks for the source" do
source = source_fixture()
{:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
task = task_fixture(source_id: source.id, job_id: job.id)
@@ -41,20 +85,11 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
- test "it deletes any pending media tasks for the source" do
+ test "deletes any executing media collection tasks for the source" do
source = source_fixture()
- {:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
- task = task_fixture(source_id: source.id, job_id: job.id)
-
- assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
-
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
- end
-
- test "it deletes any fast indexing tasks for the source" do
- source = source_fixture()
- {:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
+ {:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
task = task_fixture(source_id: source.id, job_id: job.id)
+ Repo.update_all(from(Oban.Job, where: [id: ^task.job_id], update: [set: [state: "executing"]]), [])
assert {:ok, _} = SlowIndexingHelpers.kickoff_indexing_task(source)
@@ -81,16 +116,60 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
end
end
- describe "index_and_enqueue_download_for_media_items/1" do
+ describe "delete_indexing_tasks/2" do
+ test "deletes slow indexing tasks for the source", %{source: source} do
+ {:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
+ _task = task_fixture(source_id: source.id, job_id: job.id)
+
+ assert_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+ assert :ok = SlowIndexingHelpers.delete_indexing_tasks(source)
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
+ end
+
+ test "deletes fast indexing tasks for the source", %{source: source} do
+ {:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
+ _task = task_fixture(source_id: source.id, job_id: job.id)
+
+ assert_enqueued(worker: FastIndexingWorker, args: %{"id" => source.id})
+ assert :ok = SlowIndexingHelpers.delete_indexing_tasks(source)
+ refute_enqueued(worker: FastIndexingWorker)
+ end
+
+ test "doesn't normally delete currently executing tasks", %{source: source} do
+ {:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
+ task = task_fixture(source_id: source.id, job_id: job.id)
+
+ from(Oban.Job, where: [id: ^job.id], update: [set: [state: "executing"]])
+ |> Repo.update_all([])
+
+ assert Repo.reload!(task)
+ assert :ok = SlowIndexingHelpers.delete_indexing_tasks(source)
+ assert Repo.reload!(task)
+ end
+
+ test "can optionally delete currently executing tasks", %{source: source} do
+ {:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
+ task = task_fixture(source_id: source.id, job_id: job.id)
+
+ from(Oban.Job, where: [id: ^job.id], update: [set: [state: "executing"]])
+ |> Repo.update_all([])
+
+ assert Repo.reload!(task)
+ assert :ok = SlowIndexingHelpers.delete_indexing_tasks(source, include_executing: true)
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
+ end
+ end
+
+ describe "index_and_enqueue_download_for_media_items/2" do
setup do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture()}
end)
- {:ok, [source: source_fixture()]}
+ :ok
end
- test "it creates a media_item record for each media ID returned", %{source: source} do
+ test "creates a media_item record for each media ID returned", %{source: source} do
assert media_items = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
assert Enum.count(media_items) == 3
@@ -101,7 +180,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert Enum.all?(media_items, fn %MediaItem{} -> true end)
end
- test "it attaches all media_items to the given source", %{source: source} do
+ test "attaches all media_items to the given source", %{source: source} do
source_id = source.id
assert media_items = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
@@ -109,7 +188,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert Enum.all?(media_items, fn %MediaItem{source_id: ^source_id} -> true end)
end
- test "it won't duplicate media_items based on media_id and source", %{source: source} do
+ test "won't duplicate media_items based on media_id and source", %{source: source} do
_first_run = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
_duplicate_run = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
@@ -117,7 +196,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert Enum.count(media_items) == 3
end
- test "it can duplicate media_ids for different sources", %{source: source} do
+ test "can duplicate media_ids for different sources", %{source: source} do
other_source = source_fixture()
media_items = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
@@ -130,7 +209,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
Enum.map(media_items_other_source, & &1.media_id)
end
- test "it returns a list of media_items", %{source: source} do
+ test "returns a list of media_items", %{source: source} do
first_run = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
duplicate_run = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
@@ -140,7 +219,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert first_ids == duplicate_ids
end
- test "it updates the source's last_indexed_at field", %{source: source} do
+ test "updates the source's last_indexed_at field", %{source: source} do
assert source.last_indexed_at == nil
SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
@@ -149,7 +228,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert DateTime.diff(DateTime.utc_now(), source.last_indexed_at) < 2
end
- test "it enqueues a job for each pending media item" do
+ test "enqueues a job for each pending media item" do
source = source_fixture()
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
@@ -158,7 +237,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
end
- test "it does not attach tasks if the source is set to not download" do
+ test "does not attach tasks if the source is set to not download" do
source = source_fixture(download_media: false)
media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
@@ -167,17 +246,17 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert [] = Tasks.list_tasks_for(media_item)
end
- test "it doesn't blow up if a media item cannot be coerced into a struct", %{source: source} do
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ test "doesn't blow up if a media item cannot be coerced into a struct", %{source: source} do
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
response =
Phoenix.json_library().encode!(%{
id: "video3",
title: "Video 3",
- was_live: false,
+ live_status: "not_live",
description: "desc3",
# Only focusing on these because these are passed to functions that
# could fail if they're not present
- webpage_url: nil,
+ original_url: nil,
aspect_ratio: nil,
duration: nil,
upload_date: nil
@@ -190,17 +269,81 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert %Ecto.Changeset{} = changeset
end
- end
- describe "index_and_enqueue_download_for_media_items/1 when testing file watcher" do
- setup do
- {:ok, [source: source_fixture()]}
+ test "doesn't blow up if the media item cannot be saved", %{source: source} do
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ response =
+ Phoenix.json_library().encode!(%{
+ id: "video1",
+ # This is a disallowed title - see MediaItem changeset or issue #549
+ title: "youtube video #123",
+ original_url: "https://example.com/video1",
+ live_status: "not_live",
+ description: "desc1",
+ aspect_ratio: 1.67,
+ duration: 12.34,
+ upload_date: "20210101"
+ })
+
+ {:ok, response}
+ end)
+
+ assert [changeset] = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+
+ assert %Ecto.Changeset{} = changeset
end
+ test "passes the source's download options to the yt-dlp runner", %{source: source} do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ assert {:output, "/tmp/test/media/%(title)S.%(ext)S"} in opts
+ assert {:remux_video, "mp4"} in opts
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+ end
+
+ describe "index_and_enqueue_download_for_media_items/2 when testing cookies" do
+ test "sets use_cookies if the source uses cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
+ assert {:use_cookies, true} in addl_opts
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+
+ test "sets use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
+ assert {:use_cookies, true} in addl_opts
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+
+ test "doesn't set use_cookies if the source doesn't use cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
+ assert {:use_cookies, false} in addl_opts
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ source = source_fixture(%{cookie_behaviour: :disabled})
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+ end
+
+ describe "index_and_enqueue_download_for_media_items/2 when testing file watcher" do
test "creates a new media item for everything already in the file", %{source: source} do
watcher_poll_interval = Application.get_env(:pinchflat, :file_watcher_poll_interval)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
File.write(filepath, source_attributes_return_fixture())
@@ -219,7 +362,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
test "enqueues a download for everything already in the file", %{source: source} do
watcher_poll_interval = Application.get_env(:pinchflat, :file_watcher_poll_interval)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
File.write(filepath, source_attributes_return_fixture())
@@ -239,7 +382,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
watcher_poll_interval = Application.get_env(:pinchflat, :file_watcher_poll_interval)
source = source_fixture(download_media: false)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
File.write(filepath, source_attributes_return_fixture())
@@ -259,15 +402,15 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
profile = media_profile_fixture(%{shorts_behaviour: :exclude})
source = source_fixture(%{media_profile_id: profile.id})
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
contents =
Phoenix.json_library().encode!(%{
id: "video2",
title: "Video 2",
- webpage_url: "https://example.com/shorts/video2",
- was_live: true,
+ original_url: "https://example.com/shorts/video2",
+ live_status: "is_live",
description: "desc2",
aspect_ratio: 1.67,
duration: 345.67,
@@ -290,7 +433,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
test "does not enqueue multiple download jobs for the same media items", %{source: source} do
watcher_poll_interval = Application.get_env(:pinchflat, :file_watcher_poll_interval)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
File.write(filepath, source_attributes_return_fixture())
@@ -309,7 +452,7 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
test "does not blow up if the file returns invalid json", %{source: source} do
watcher_poll_interval = Application.get_env(:pinchflat, :file_watcher_poll_interval)
- stub(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
+ stub(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
filepath = Keyword.get(addl_opts, :output_filepath)
File.write(filepath, "INVALID")
@@ -323,4 +466,79 @@ defmodule Pinchflat.SlowIndexing.SlowIndexingHelpersTest do
assert [] = SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
end
end
+
+ describe "index_and_enqueue_download_for_media_items when testing the download archive" do
+ test "a download archive is used if the source is a channel that has been indexed before" do
+ source = source_fixture(%{collection_type: :channel, last_indexed_at: now()})
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ assert :break_on_existing in opts
+ assert Keyword.has_key?(opts, :download_archive)
+
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+
+ test "a download archive is not used if the source is not a channel" do
+ source = source_fixture(%{collection_type: :playlist})
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ refute :break_on_existing in opts
+ refute Keyword.has_key?(opts, :download_archive)
+
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+
+ test "a download archive is not used if the source has never been indexed before" do
+ source = source_fixture(%{collection_type: :channel, last_indexed_at: nil})
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ refute :break_on_existing in opts
+ refute Keyword.has_key?(opts, :download_archive)
+
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+
+ test "a download archive is not used if the index has been forced to run" do
+ source = source_fixture(%{collection_type: :channel})
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ refute :break_on_existing in opts
+ refute Keyword.has_key?(opts, :download_archive)
+
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source, was_forced: true)
+ end
+
+ test "the download archive is formatted correctly and contains the right video" do
+ source = source_fixture(%{collection_type: :channel, last_indexed_at: now()})
+
+ media_items =
+ 1..21
+ |> Enum.map(fn n ->
+ media_item_fixture(%{source_id: source.id, uploaded_at: now_minus(n, :days)})
+ end)
+
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ archive_file = Keyword.get(opts, :download_archive)
+ last_media_item = List.last(media_items)
+
+ assert File.read!(archive_file) == "youtube #{last_media_item.media_id}"
+
+ {:ok, source_attributes_return_fixture()}
+ end)
+
+ SlowIndexingHelpers.index_and_enqueue_download_for_media_items(source)
+ end
+ end
end
diff --git a/test/pinchflat/sources/source_deletion_worker_test.exs b/test/pinchflat/sources/source_deletion_worker_test.exs
new file mode 100644
index 0000000..fe2ec3d
--- /dev/null
+++ b/test/pinchflat/sources/source_deletion_worker_test.exs
@@ -0,0 +1,52 @@
+defmodule Pinchflat.Sources.SourceDeletionWorkerTest do
+ use Pinchflat.DataCase
+
+ import Pinchflat.MediaFixtures
+ import Pinchflat.SourcesFixtures
+
+ alias Pinchflat.Sources.SourceDeletionWorker
+
+ setup do
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+
+ {:ok, %{source: source_fixture()}}
+ end
+
+ describe "kickoff/3" do
+ test "starts the worker", %{source: source} do
+ assert [] = all_enqueued(worker: SourceDeletionWorker)
+ assert {:ok, _} = SourceDeletionWorker.kickoff(source)
+ assert [_] = all_enqueued(worker: SourceDeletionWorker)
+ end
+
+ test "can be called with additional job arguments", %{source: source} do
+ job_args = %{"delete_files" => true}
+
+ assert {:ok, _} = SourceDeletionWorker.kickoff(source, job_args)
+
+ assert_enqueued(worker: SourceDeletionWorker, args: %{"id" => source.id, "delete_files" => true})
+ end
+ end
+
+ describe "perform/1" do
+ test "deletes the source but leaves the files", %{source: source} do
+ media_item = media_item_with_attachments(%{source_id: source.id})
+
+ perform_job(SourceDeletionWorker, %{"id" => source.id})
+
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
+ assert File.exists?(media_item.media_filepath)
+ end
+
+ test "deletes the source and files if specified", %{source: source} do
+ media_item = media_item_with_attachments(%{source_id: source.id})
+
+ perform_job(SourceDeletionWorker, %{"id" => source.id, "delete_files" => true})
+
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
+ refute File.exists?(media_item.media_filepath)
+ end
+ end
+end
diff --git a/test/pinchflat/sources_test.exs b/test/pinchflat/sources_test.exs
index 65f48e0..44bae14 100644
--- a/test/pinchflat/sources_test.exs
+++ b/test/pinchflat/sources_test.exs
@@ -60,6 +60,33 @@ defmodule Pinchflat.SourcesTest do
end
end
+ describe "use_cookies?/2" do
+ test "returns true if the source has been set to use cookies" do
+ source = source_fixture(%{cookie_behaviour: :all_operations})
+ assert Sources.use_cookies?(source, :downloading)
+ end
+
+ test "returns false if the source has not been set to use cookies" do
+ source = source_fixture(%{cookie_behaviour: :disabled})
+ refute Sources.use_cookies?(source, :downloading)
+ end
+
+ test "returns true if the action is indexing and the source is set to :when_needed" do
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ assert Sources.use_cookies?(source, :indexing)
+ end
+
+ test "returns false if the action is downloading and the source is set to :when_needed" do
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ refute Sources.use_cookies?(source, :downloading)
+ end
+
+ test "returns true if the action is error_recovery and the source is set to :when_needed" do
+ source = source_fixture(%{cookie_behaviour: :when_needed})
+ assert Sources.use_cookies?(source, :error_recovery)
+ end
+ end
+
describe "list_sources/0" do
test "it returns all sources" do
source = source_fixture()
@@ -85,7 +112,7 @@ defmodule Pinchflat.SourcesTest do
describe "create_source/2" do
test "automatically sets the UUID" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -97,7 +124,7 @@ defmodule Pinchflat.SourcesTest do
end
test "UUID is not writable by the user" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -110,7 +137,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creates a source and adds name + ID from runner response for channels" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -123,7 +150,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creates a source and adds name + ID for playlists" do
- expect(YtDlpRunnerMock, :run, &playlist_mock/3)
+ expect(YtDlpRunnerMock, :run, &playlist_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -136,7 +163,19 @@ defmodule Pinchflat.SourcesTest do
end
test "adds an error if the runner fails" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:error, "some error", 1} end)
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, _addl -> {:error, "some error", 1} end)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123"
+ }
+
+ assert {:error, %Ecto.Changeset{} = changeset} = Sources.create_source(valid_attrs)
+ assert "could not fetch source details from URL" in errors_on(changeset).original_url
+ end
+
+ test "adds an error if the runner succeeds but the result was invalid JSON" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, _addl -> {:ok, "Not JSON"} end)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -148,7 +187,7 @@ defmodule Pinchflat.SourcesTest do
end
test "you can specify a custom custom_name" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -162,7 +201,7 @@ defmodule Pinchflat.SourcesTest do
end
test "friendly name is pulled from collection_name if not specified" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -175,7 +214,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creation enforces uniqueness of collection_id scoped to the media_profile and title regex" do
- expect(YtDlpRunnerMock, :run, 2, fn _url, _opts, _ot ->
+ expect(YtDlpRunnerMock, :run, 2, fn _url, :get_source_details, _opts, _ot, _addl ->
{:ok,
Phoenix.json_library().encode!(%{
channel: "some channel name",
@@ -196,7 +235,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creation lets you duplicate collection_ids and profiles as long as the regex is different" do
- expect(YtDlpRunnerMock, :run, 2, fn _url, _opts, _ot ->
+ expect(YtDlpRunnerMock, :run, 2, fn _url, :get_source_details, _opts, _ot, _addl ->
{:ok,
Phoenix.json_library().encode!(%{
channel: "some channel name",
@@ -220,7 +259,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creation lets you duplicate collection_ids as long as the media profile is different" do
- expect(YtDlpRunnerMock, :run, 2, fn _url, _opts, _ot ->
+ expect(YtDlpRunnerMock, :run, 2, fn _url, :get_source_details, _opts, _ot, _addl ->
{:ok,
Phoenix.json_library().encode!(%{
channel: "some channel name",
@@ -244,8 +283,8 @@ defmodule Pinchflat.SourcesTest do
end
test "collection_type is inferred from source details" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
- expect(YtDlpRunnerMock, :run, &playlist_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
+ expect(YtDlpRunnerMock, :run, &playlist_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -264,13 +303,13 @@ defmodule Pinchflat.SourcesTest do
end
test "creation with invalid data fails fast and does not call the runner" do
- expect(YtDlpRunnerMock, :run, 0, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, 0, &channel_mock/5)
assert {:error, %Ecto.Changeset{}} = Sources.create_source(@invalid_source_attrs)
end
test "creation will schedule the indexing task" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -282,8 +321,36 @@ defmodule Pinchflat.SourcesTest do
assert_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
end
+ test "creation will schedule a fast indexing job if the fast_index option is set" do
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123",
+ fast_index: true
+ }
+
+ assert {:ok, %Source{} = source} = Sources.create_source(valid_attrs)
+
+ assert_enqueued(worker: FastIndexingWorker, args: %{"id" => source.id})
+ end
+
+ test "creation will not schedule a fast indexing job if the fast_index option is not set" do
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123",
+ fast_index: false
+ }
+
+ assert {:ok, %Source{}} = Sources.create_source(valid_attrs)
+
+ refute_enqueued(worker: FastIndexingWorker)
+ end
+
test "creation schedules an index test even if the index frequency is 0" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -297,7 +364,7 @@ defmodule Pinchflat.SourcesTest do
end
test "fast_index forces the index frequency to be a default value" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -312,7 +379,7 @@ defmodule Pinchflat.SourcesTest do
end
test "disabling fast index will not change the index frequency" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -327,7 +394,7 @@ defmodule Pinchflat.SourcesTest do
end
test "creating will kickoff a metadata storage worker" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -342,9 +409,74 @@ defmodule Pinchflat.SourcesTest do
end
end
- describe "create_source/2 when testing options" do
+ describe "create_source/2 when testing yt-dlp options" do
+ test "sets use_cookies to true if the source has been set to use cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, addl ->
+ assert Keyword.get(addl, :use_cookies)
+
+ {:ok, playlist_return()}
+ end)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123",
+ cookie_behaviour: :all_operations
+ }
+
+ assert {:ok, %Source{}} = Sources.create_source(valid_attrs)
+ end
+
+ test "does not set use_cookies if the source uses cookies when needed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, addl ->
+ refute Keyword.get(addl, :use_cookies)
+
+ {:ok, playlist_return()}
+ end)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123",
+ cookie_behaviour: :when_needed
+ }
+
+ assert {:ok, %Source{}} = Sources.create_source(valid_attrs)
+ end
+
+ test "does not set use_cookies if the source has not been set to use cookies" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, addl ->
+ refute Keyword.get(addl, :use_cookies)
+
+ {:ok, playlist_return()}
+ end)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123",
+ cookie_behaviour: :disabled
+ }
+
+ assert {:ok, %Source{}} = Sources.create_source(valid_attrs)
+ end
+
+ test "skips sleep interval" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, addl ->
+ assert Keyword.get(addl, :skip_sleep_interval)
+
+ {:ok, playlist_return()}
+ end)
+
+ valid_attrs = %{
+ media_profile_id: media_profile_fixture().id,
+ original_url: "https://www.youtube.com/channel/abc123"
+ }
+
+ assert {:ok, %Source{}} = Sources.create_source(valid_attrs)
+ end
+ end
+
+ describe "create_source/2 when testing its options" do
test "run_post_commit_tasks: false won't enqueue post-commit tasks" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
valid_attrs = %{
media_profile_id: media_profile_fixture().id,
@@ -368,7 +500,7 @@ defmodule Pinchflat.SourcesTest do
end
test "updates with invalid data fails fast and does not call the runner" do
- expect(YtDlpRunnerMock, :run, 0, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, 0, &channel_mock/5)
source = source_fixture()
@@ -376,7 +508,7 @@ defmodule Pinchflat.SourcesTest do
end
test "updating the original_url will re-fetch the source details for channels" do
- expect(YtDlpRunnerMock, :run, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, &channel_mock/5)
source = source_fixture()
update_attrs = %{original_url: "https://www.youtube.com/channel/abc123"}
@@ -387,7 +519,7 @@ defmodule Pinchflat.SourcesTest do
end
test "updating the original_url will re-fetch the source details for playlists" do
- expect(YtDlpRunnerMock, :run, &playlist_mock/3)
+ expect(YtDlpRunnerMock, :run, &playlist_mock/5)
source = source_fixture()
update_attrs = %{original_url: "https://www.youtube.com/playlist?list=abc123"}
@@ -398,7 +530,7 @@ defmodule Pinchflat.SourcesTest do
end
test "not updating the original_url will not re-fetch the source details" do
- expect(YtDlpRunnerMock, :run, 0, &channel_mock/3)
+ expect(YtDlpRunnerMock, :run, 0, &channel_mock/5)
source = source_fixture()
update_attrs = %{name: "some updated name"}
@@ -406,6 +538,100 @@ defmodule Pinchflat.SourcesTest do
assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
end
+ test "updates with invalid data returns error changeset" do
+ source = source_fixture()
+
+ assert {:error, %Ecto.Changeset{}} =
+ Sources.update_source(source, @invalid_source_attrs)
+
+ assert source == Sources.get_source!(source.id)
+ end
+
+ test "updating will kickoff a metadata storage worker if the original_url changes" do
+ expect(YtDlpRunnerMock, :run, &playlist_mock/5)
+ source = source_fixture()
+ update_attrs = %{original_url: "https://www.youtube.com/channel/cba321"}
+
+ assert {:ok, %Source{} = source} = Sources.update_source(source, update_attrs)
+
+ assert_enqueued(worker: SourceMetadataStorageWorker, args: %{"id" => source.id})
+ end
+
+ test "updating will not kickoff a metadata storage worker if other attrs change" do
+ source = source_fixture()
+ update_attrs = %{name: "some new name"}
+
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+
+ refute_enqueued(worker: SourceMetadataStorageWorker)
+ end
+ end
+
+ describe "update_source/3 when testing media download tasks" do
+ test "enabling the download_media attribute will schedule a download task" do
+ source = source_fixture(download_media: false)
+ media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{download_media: true}
+
+ refute_enqueued(worker: MediaDownloadWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+ end
+
+ test "disabling the download_media attribute will cancel the download task" do
+ source = source_fixture(download_media: true, enabled: true)
+ media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{download_media: false}
+ DownloadingHelpers.enqueue_pending_download_tasks(source)
+
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: MediaDownloadWorker)
+ end
+
+ test "enabling download_media will not schedule a task if the source is disabled" do
+ source = source_fixture(download_media: false, enabled: false)
+ _media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{download_media: true}
+
+ refute_enqueued(worker: MediaDownloadWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: MediaDownloadWorker)
+ end
+
+ test "disabling a source will cancel any pending download tasks" do
+ source = source_fixture(download_media: true, enabled: true)
+ media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{enabled: false}
+ DownloadingHelpers.enqueue_pending_download_tasks(source)
+
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: MediaDownloadWorker)
+ end
+
+ test "enabling a source will schedule a download task if download_media is true" do
+ source = source_fixture(download_media: true, enabled: false)
+ media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: MediaDownloadWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+ end
+
+ test "enabling a source will not schedule a download task if download_media is false" do
+ source = source_fixture(download_media: false, enabled: false)
+ _media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: MediaDownloadWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: MediaDownloadWorker)
+ end
+ end
+
+ describe "update_source/3 when testing slow indexing" do
test "updating the index frequency to >0 will re-schedule the indexing task" do
source = source_fixture()
update_attrs = %{index_frequency_minutes: 123}
@@ -450,27 +676,47 @@ defmodule Pinchflat.SourcesTest do
refute_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
end
- test "enabling the download_media attribute will schedule a download task" do
- source = source_fixture(download_media: false)
- media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
- update_attrs = %{download_media: true}
+ test "disabling a source will delete any pending tasks" do
+ source = source_fixture()
+ update_attrs = %{enabled: false}
+
+ {:ok, job} = Oban.insert(MediaCollectionIndexingWorker.new(%{"id" => source.id}))
+ task = task_fixture(source_id: source.id, job_id: job.id)
- refute_enqueued(worker: MediaDownloadWorker)
assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
- assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
- test "disabling the download_media attribute will cancel the download task" do
- source = source_fixture(download_media: true)
- media_item = media_item_fixture(source_id: source.id, media_filepath: nil)
- update_attrs = %{download_media: false}
- DownloadingHelpers.enqueue_pending_download_tasks(source)
+ test "updating the index frequency will not create a task if the source is disabled" do
+ source = source_fixture(enabled: false)
+ update_attrs = %{index_frequency_minutes: 123}
- assert_enqueued(worker: MediaDownloadWorker, args: %{"id" => media_item.id})
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
- refute_enqueued(worker: MediaDownloadWorker)
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
end
+ test "enabling a source will create a task if the index frequency is >0" do
+ source = source_fixture(enabled: false, index_frequency_minutes: 123)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ assert_enqueued(worker: MediaCollectionIndexingWorker, args: %{"id" => source.id})
+ end
+
+ test "enabling a source will not create a task if the index frequency is 0" do
+ source = source_fixture(enabled: false, index_frequency_minutes: 0)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: MediaCollectionIndexingWorker)
+ end
+ end
+
+ describe "update_source/3 when testing fast indexing" do
test "enabling fast_index will schedule a fast indexing task" do
source = source_fixture(fast_index: false)
update_attrs = %{fast_index: true}
@@ -491,15 +737,6 @@ defmodule Pinchflat.SourcesTest do
refute_enqueued(worker: FastIndexingWorker)
end
- test "updates with invalid data returns error changeset" do
- source = source_fixture()
-
- assert {:error, %Ecto.Changeset{}} =
- Sources.update_source(source, @invalid_source_attrs)
-
- assert source == Sources.get_source!(source.id)
- end
-
test "fast_index forces the index frequency to be a default value" do
source = source_fixture(%{fast_index: true})
update_attrs = %{index_frequency_minutes: 0}
@@ -518,23 +755,43 @@ defmodule Pinchflat.SourcesTest do
assert source.index_frequency_minutes == 0
end
- test "updating will kickoff a metadata storage worker if the original_url changes" do
- expect(YtDlpRunnerMock, :run, &playlist_mock/3)
+ test "disabling a source will delete any pending tasks" do
source = source_fixture()
- update_attrs = %{original_url: "https://www.youtube.com/channel/cba321"}
+ update_attrs = %{enabled: false}
- assert {:ok, %Source{} = source} = Sources.update_source(source, update_attrs)
-
- assert_enqueued(worker: SourceMetadataStorageWorker, args: %{"id" => source.id})
- end
-
- test "updating will not kickoff a metadata storage worker other attrs change" do
- source = source_fixture()
- update_attrs = %{name: "some new name"}
+ {:ok, job} = Oban.insert(FastIndexingWorker.new(%{"id" => source.id}))
+ task = task_fixture(source_id: source.id, job_id: job.id)
assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
- refute_enqueued(worker: SourceMetadataStorageWorker)
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
+ end
+
+ test "updating fast indexing will not create a task if the source is disabled" do
+ source = source_fixture(enabled: false, fast_index: false)
+ update_attrs = %{fast_index: true}
+
+ refute_enqueued(worker: FastIndexingWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: FastIndexingWorker)
+ end
+
+ test "enabling a source will create a task if fast_index is true" do
+ source = source_fixture(enabled: false, fast_index: true)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: FastIndexingWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ assert_enqueued(worker: FastIndexingWorker, args: %{"id" => source.id})
+ end
+
+ test "enabling a source will not create a task if fast_index is false" do
+ source = source_fixture(enabled: false, fast_index: false)
+ update_attrs = %{enabled: true}
+
+ refute_enqueued(worker: FastIndexingWorker)
+ assert {:ok, %Source{}} = Sources.update_source(source, update_attrs)
+ refute_enqueued(worker: FastIndexingWorker)
end
end
@@ -617,7 +874,7 @@ defmodule Pinchflat.SourcesTest do
describe "delete_source/2 when deleting files" do
setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
:ok
end
@@ -681,6 +938,34 @@ defmodule Pinchflat.SourcesTest do
end
end
+ describe "change_source/3 when testing min/max duration validations" do
+ test "succeeds if min and max are nil" do
+ source = source_fixture()
+
+ assert %{errors: []} = Sources.change_source(source, %{min_duration_seconds: nil, max_duration_seconds: nil})
+ end
+
+ test "succeeds if either min or max is nil" do
+ source = source_fixture()
+
+ assert %{errors: []} = Sources.change_source(source, %{min_duration_seconds: nil, max_duration_seconds: 100})
+ assert %{errors: []} = Sources.change_source(source, %{min_duration_seconds: 100, max_duration_seconds: nil})
+ end
+
+ test "succeeds if min is less than max" do
+ source = source_fixture()
+
+ assert %{errors: []} = Sources.change_source(source, %{min_duration_seconds: 100, max_duration_seconds: 200})
+ end
+
+ test "fails if min is greater than or equal to max" do
+ source = source_fixture()
+
+ assert %{errors: [_]} = Sources.change_source(source, %{min_duration_seconds: 200, max_duration_seconds: 100})
+ assert %{errors: [_]} = Sources.change_source(source, %{min_duration_seconds: 100, max_duration_seconds: 100})
+ end
+ end
+
describe "change_source/3 when testing original_url validation" do
test "succeeds when an original URL is valid" do
source = source_fixture()
@@ -736,29 +1021,31 @@ defmodule Pinchflat.SourcesTest do
end
end
- defp playlist_mock(_url, _opts, _ot) do
- {
- :ok,
- Phoenix.json_library().encode!(%{
- channel: nil,
- channel_id: nil,
- playlist_id: "some_playlist_id_#{:rand.uniform(1_000_000)}",
- playlist_title: "some playlist name"
- })
- }
+ defp playlist_mock(_url, :get_source_details, _opts, _ot, _addl) do
+ {:ok, playlist_return()}
end
- defp channel_mock(_url, _opts, _ot) do
+ defp channel_mock(_url, :get_source_details, _opts, _ot, _addl) do
+ {:ok, channel_return()}
+ end
+
+ defp playlist_return do
+ Phoenix.json_library().encode!(%{
+ channel: nil,
+ channel_id: nil,
+ playlist_id: "some_playlist_id_#{:rand.uniform(1_000_000)}",
+ playlist_title: "some playlist name"
+ })
+ end
+
+ defp channel_return do
channel_id = "some_channel_id_#{:rand.uniform(1_000_000)}"
- {
- :ok,
- Phoenix.json_library().encode!(%{
- channel: "some channel name",
- channel_id: channel_id,
- playlist_id: channel_id,
- playlist_title: "some channel name - videos"
- })
- }
+ Phoenix.json_library().encode!(%{
+ channel: "some channel name",
+ channel_id: channel_id,
+ playlist_id: channel_id,
+ playlist_title: "some channel name - videos"
+ })
end
end
diff --git a/test/pinchflat/tasks_test.exs b/test/pinchflat/tasks_test.exs
index 36140d5..53d1c32 100644
--- a/test/pinchflat/tasks_test.exs
+++ b/test/pinchflat/tasks_test.exs
@@ -12,7 +12,7 @@ defmodule Pinchflat.TasksTest do
@invalid_attrs %{job_id: nil}
describe "schema" do
- test "it deletes a task when the job gets deleted" do
+ test "deletes a task when the job gets deleted" do
task = Repo.preload(task_fixture(), [:job])
{:ok, _} = Repo.delete(task.job)
@@ -20,7 +20,7 @@ defmodule Pinchflat.TasksTest do
assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
- test "it does not delete the other record when a job gets deleted" do
+ test "does not delete the other record when a job gets deleted" do
task = Repo.preload(task_fixture(), [:source, :job])
{:ok, _} = Repo.delete(task.job)
@@ -30,21 +30,21 @@ defmodule Pinchflat.TasksTest do
end
describe "list_tasks/0" do
- test "it returns all tasks" do
+ test "returns all tasks" do
task = task_fixture()
assert Tasks.list_tasks() == [task]
end
end
describe "list_tasks_for/3" do
- test "it lets you specify which record type/ID to join on" do
+ test "lets you specify which record type/ID to join on" do
source = source_fixture()
task = task_fixture(source_id: source.id)
assert Tasks.list_tasks_for(source, nil, [:available]) == [task]
end
- test "it lets you specify which job states to include" do
+ test "lets you specify which job states to include" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -52,7 +52,7 @@ defmodule Pinchflat.TasksTest do
assert Tasks.list_tasks_for(source, nil, [:cancelled]) == []
end
- test "it lets you specify which worker to include" do
+ test "lets you specify which worker to include" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -60,7 +60,7 @@ defmodule Pinchflat.TasksTest do
assert Tasks.list_tasks_for(source, "FooBarWorker") == []
end
- test "it includes all workers if no worker is specified" do
+ test "includes all workers if no worker is specified" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -68,32 +68,8 @@ defmodule Pinchflat.TasksTest do
end
end
- describe "list_pending_tasks_for/3" do
- test "it lists pending tasks" do
- source = source_fixture()
- task = task_fixture(source_id: source.id)
-
- assert Tasks.list_pending_tasks_for(source) == [task]
- end
-
- test "it does not list non-pending tasks" do
- task = Repo.preload(task_fixture(), [:job, :source])
- :ok = Oban.cancel_job(task.job)
-
- assert Tasks.list_pending_tasks_for(task.source) == []
- end
-
- test "it lets you specify which worker to include" do
- source = source_fixture()
- task = task_fixture(source_id: source.id)
-
- assert Tasks.list_pending_tasks_for(source, "TestJobWorker") == [task]
- assert Tasks.list_pending_tasks_for(source, "FooBarWorker") == []
- end
- end
-
describe "get_task!/1" do
- test "it returns the task with given id" do
+ test "returns the task with given id" do
task = task_fixture()
assert Tasks.get_task!(task.id) == task
end
@@ -132,7 +108,7 @@ defmodule Pinchflat.TasksTest do
end
describe "create_job_with_task/2" do
- test "it enqueues the given job" do
+ test "enqueues the given job" do
media_item = media_item_fixture()
refute_enqueued(worker: TestJobWorker)
@@ -140,7 +116,7 @@ defmodule Pinchflat.TasksTest do
assert_enqueued(worker: TestJobWorker)
end
- test "it creates a task record if successful" do
+ test "creates a task record if successful" do
source = source_fixture()
assert {:ok, %Task{} = task} = Tasks.create_job_with_task(TestJobWorker.new(%{}), source)
@@ -148,7 +124,7 @@ defmodule Pinchflat.TasksTest do
assert task.source_id == source.id
end
- test "it returns an error if the job already exists" do
+ test "returns an error if the job already exists" do
source = source_fixture()
job = TestJobWorker.new(%{foo: "bar"}, unique: [period: :infinity])
@@ -156,7 +132,7 @@ defmodule Pinchflat.TasksTest do
assert {:error, :duplicate_job} = Tasks.create_job_with_task(job, source)
end
- test "it returns an error if the job fails to enqueue" do
+ test "returns an error if the job fails to enqueue" do
source = source_fixture()
assert {:error, %Ecto.Changeset{}} = Tasks.create_job_with_task(%Ecto.Changeset{}, source)
@@ -181,7 +157,7 @@ defmodule Pinchflat.TasksTest do
end
describe "delete_tasks_for/2" do
- test "it deletes tasks attached to a source" do
+ test "deletes tasks attached to a source" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -189,7 +165,7 @@ defmodule Pinchflat.TasksTest do
assert_raise Ecto.NoResultsError, fn -> Tasks.get_task!(task.id) end
end
- test "it deletes the tasks attached to a media_item" do
+ test "deletes the tasks attached to a media_item" do
media_item = media_item_fixture()
task = task_fixture(media_item_id: media_item.id)
@@ -208,6 +184,17 @@ defmodule Pinchflat.TasksTest do
assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
+ test "deletion can specify which states to include" do
+ source = source_fixture()
+ task = task_fixture(source_id: source.id)
+
+ assert :ok = Tasks.delete_tasks_for(source, nil, [:executing])
+ assert Repo.reload!(task)
+
+ assert :ok = Tasks.delete_tasks_for(source, nil, [:available])
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
+ end
+
test "deletion does not impact unintended records" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -221,7 +208,7 @@ defmodule Pinchflat.TasksTest do
end
describe "delete_pending_tasks_for/1" do
- test "it deletes pending tasks attached to a source" do
+ test "deletes pending tasks attached to a source" do
source = source_fixture()
task = task_fixture(source_id: source.id)
@@ -229,7 +216,7 @@ defmodule Pinchflat.TasksTest do
assert_raise Ecto.NoResultsError, fn -> Tasks.get_task!(task.id) end
end
- test "it does not delete non-pending tasks" do
+ test "does not delete non-pending tasks" do
source = source_fixture()
task = Repo.preload(task_fixture(source_id: source.id), :job)
:ok = Oban.cancel_job(task.job)
@@ -238,7 +225,7 @@ defmodule Pinchflat.TasksTest do
assert Tasks.get_task!(task.id)
end
- test "it works on media_items" do
+ test "works on media_items" do
media_item = media_item_fixture()
pending_task = task_fixture(media_item_id: media_item.id)
cancelled_task = Repo.preload(task_fixture(media_item_id: media_item.id), :job)
@@ -259,10 +246,23 @@ defmodule Pinchflat.TasksTest do
assert :ok = Tasks.delete_pending_tasks_for(media_item, "TestJobWorker")
assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
end
+
+ test "deletion can optionally include executing tasks" do
+ source = source_fixture()
+ task = task_fixture(source_id: source.id)
+
+ from(Oban.Job, where: [id: ^task.job_id], update: [set: [state: "executing"]])
+ |> Repo.update_all([])
+
+ assert :ok = Tasks.delete_pending_tasks_for(source, nil, include_executing: false)
+ assert Repo.reload!(task)
+ assert :ok = Tasks.delete_pending_tasks_for(source, nil, include_executing: true)
+ assert_raise Ecto.NoResultsError, fn -> Repo.reload!(task) end
+ end
end
describe "change_task/1" do
- test "it returns a task changeset" do
+ test "returns a task changeset" do
task = task_fixture()
assert %Ecto.Changeset{} = Tasks.change_task(task)
end
diff --git a/test/pinchflat/utils/filesystem_utils_test.exs b/test/pinchflat/utils/filesystem_utils_test.exs
index a964e48..33b5c8d 100644
--- a/test/pinchflat/utils/filesystem_utils_test.exs
+++ b/test/pinchflat/utils/filesystem_utils_test.exs
@@ -37,6 +37,46 @@ defmodule Pinchflat.Utils.FilesystemUtilsTest do
end
end
+ describe "filepaths_reference_same_file?/2" do
+ setup do
+ filepath = FilesystemUtils.generate_metadata_tmpfile(:json)
+
+ on_exit(fn -> File.rm!(filepath) end)
+
+ {:ok, %{filepath: filepath}}
+ end
+
+ test "returns true if the files are the same", %{filepath: filepath} do
+ assert FilesystemUtils.filepaths_reference_same_file?(filepath, filepath)
+ end
+
+ test "returns true if different filepaths point to the same file", %{filepath: filepath} do
+ short_path = Path.expand(filepath)
+ long_path = Path.join(["/tmp", "..", filepath])
+
+ assert short_path != long_path
+ assert FilesystemUtils.filepaths_reference_same_file?(short_path, long_path)
+ end
+
+ test "returns true if the files are symlinked", %{filepath: filepath} do
+ tmpfile_directory = Application.get_env(:pinchflat, :tmpfile_directory)
+ other_filepath = Path.join([tmpfile_directory, "symlink.json"])
+ :ok = File.ln_s!(filepath, other_filepath)
+
+ assert FilesystemUtils.filepaths_reference_same_file?(filepath, other_filepath)
+
+ File.rm!(other_filepath)
+ end
+
+ test "returns false if the files are different", %{filepath: filepath} do
+ other_filepath = FilesystemUtils.generate_metadata_tmpfile(:json)
+
+ refute FilesystemUtils.filepaths_reference_same_file?(filepath, other_filepath)
+
+ File.rm!(other_filepath)
+ end
+ end
+
describe "generate_metadata_tmpfile/1" do
test "creates a tmpfile and returns its path" do
res = FilesystemUtils.generate_metadata_tmpfile(:json)
diff --git a/test/pinchflat/utils/map_utils_test.exs b/test/pinchflat/utils/map_utils_test.exs
new file mode 100644
index 0000000..c16356a
--- /dev/null
+++ b/test/pinchflat/utils/map_utils_test.exs
@@ -0,0 +1,31 @@
+defmodule Pinchflat.Utils.MapUtilsTest do
+ use Pinchflat.DataCase
+
+ alias Pinchflat.Utils.MapUtils
+
+ describe "from_nested_list/1" do
+ test "creates a map from a nested 2-element tuple list" do
+ list = [
+ {"key1", "value1"},
+ {"key2", "value2"}
+ ]
+
+ assert MapUtils.from_nested_list(list) == %{
+ "key1" => "value1",
+ "key2" => "value2"
+ }
+ end
+
+ test "creates a map from a nested 2-element list of lists" do
+ list = [
+ ["key1", "value1"],
+ ["key2", "value2"]
+ ]
+
+ assert MapUtils.from_nested_list(list) == %{
+ "key1" => "value1",
+ "key2" => "value2"
+ }
+ end
+ end
+end
diff --git a/test/pinchflat/utils/number_utils_test.exs b/test/pinchflat/utils/number_utils_test.exs
index db17e1c..1eae570 100644
--- a/test/pinchflat/utils/number_utils_test.exs
+++ b/test/pinchflat/utils/number_utils_test.exs
@@ -47,4 +47,21 @@ defmodule Pinchflat.Utils.NumberUtilsTest do
assert NumberUtils.human_byte_size(nil) == {0, "B"}
end
end
+
+ describe "add_jitter/2" do
+ test "returns 0 when the number is less than or equal to 0" do
+ assert NumberUtils.add_jitter(0) == 0
+ assert NumberUtils.add_jitter(-1) == 0
+ end
+
+ test "returns the number with jitter added" do
+ assert NumberUtils.add_jitter(100) in 100..150
+ end
+
+ test "optionally takes a jitter percentage" do
+ assert NumberUtils.add_jitter(100, 0.1) in 90..110
+ assert NumberUtils.add_jitter(100, 0.5) in 50..150
+ assert NumberUtils.add_jitter(100, 1) in 0..200
+ end
+ end
end
diff --git a/test/pinchflat/utils/string_utils_test.exs b/test/pinchflat/utils/string_utils_test.exs
index 374b293..767f74b 100644
--- a/test/pinchflat/utils/string_utils_test.exs
+++ b/test/pinchflat/utils/string_utils_test.exs
@@ -28,17 +28,19 @@ defmodule Pinchflat.Utils.StringUtilsTest do
end
end
- describe "truncate/2" do
- test "truncates a string to the given length and adds ..." do
- assert StringUtils.truncate("hello world", 5) == "hello..."
+ describe "double_brace/1" do
+ test "wraps a string in double braces" do
+ assert StringUtils.double_brace("hello") == "{{ hello }}"
+ end
+ end
+
+ describe "wrap_string/1" do
+ test "returns strings as-is" do
+ assert StringUtils.wrap_string("hello") == "hello"
end
- test "breaks on a word boundary" do
- assert StringUtils.truncate("hello world", 7) == "hello..."
- end
-
- test "does not truncate a string shorter than the given length" do
- assert StringUtils.truncate("hello", 10) == "hello"
+ test "returns other values as inspected strings" do
+ assert StringUtils.wrap_string(1) == "1"
end
end
end
diff --git a/test/pinchflat/yt_dlp/command_runner_test.exs b/test/pinchflat/yt_dlp/command_runner_test.exs
index 6dbb856..51653e3 100644
--- a/test/pinchflat/yt_dlp/command_runner_test.exs
+++ b/test/pinchflat/yt_dlp/command_runner_test.exs
@@ -1,6 +1,7 @@
defmodule Pinchflat.YtDlp.CommandRunnerTest do
use Pinchflat.DataCase
+ alias Pinchflat.Settings
alias Pinchflat.Utils.FilesystemUtils
alias Pinchflat.YtDlp.CommandRunner, as: Runner
@@ -13,30 +14,36 @@ defmodule Pinchflat.YtDlp.CommandRunnerTest do
end
describe "run/4" do
- test "it returns the output and status when the command succeeds" do
- assert {:ok, _output} = Runner.run(@media_url, [], "")
+ test "returns the output and status when the command succeeds" do
+ assert {:ok, _output} = Runner.run(@media_url, :foo, [], "")
end
- test "it includes the media url as the first argument" do
- assert {:ok, output} = Runner.run(@media_url, [:ignore_errors], "")
+ test "considers a 101 exit code as being successful" do
+ wrap_executable("/app/test/support/scripts/yt-dlp-mocks/101_exit_code.sh", fn ->
+ assert {:ok, _output} = Runner.run(@media_url, :foo, [], "")
+ end)
+ end
+
+ test "includes the media url as the first argument" do
+ assert {:ok, output} = Runner.run(@media_url, :foo, [:ignore_errors], "")
assert String.contains?(output, "#{@media_url} --ignore-errors")
end
- test "it automatically includes the --print-to-file flag" do
- assert {:ok, output} = Runner.run(@media_url, [], "%(id)s")
+ test "automatically includes the --print-to-file flag" do
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "%(id)s")
assert String.contains?(output, "--print-to-file %(id)s /tmp/")
end
- test "it returns the output and status when the command fails" do
+ test "returns the output and status when the command fails" do
wrap_executable("/bin/false", fn ->
- assert {:error, "", 1} = Runner.run(@media_url, [], "")
+ assert {:error, "", 1} = Runner.run(@media_url, :foo, [], "")
end)
end
test "optionally lets you specify an output_filepath" do
- assert {:ok, output} = Runner.run(@media_url, [], "%(id)s", output_filepath: "/tmp/yt-dlp-output.json")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "%(id)s", output_filepath: "/tmp/yt-dlp-output.json")
assert String.contains?(output, "--print-to-file %(id)s /tmp/yt-dlp-output.json")
end
@@ -51,18 +58,26 @@ defmodule Pinchflat.YtDlp.CommandRunnerTest do
{:ok, cookie_file: cookie_file, yt_dlp_file: yt_dlp_file}
end
- test "includes cookie options when cookies.txt exists", %{cookie_file: cookie_file} do
+ test "includes cookie options when cookies.txt exists and enabled", %{cookie_file: cookie_file} do
FilesystemUtils.write_p!(cookie_file, "cookie data")
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "", use_cookies: true)
assert String.contains?(output, "--cookies #{cookie_file}")
end
+ test "doesn't include cookie options when cookies.txt exists but disabled", %{cookie_file: cookie_file} do
+ FilesystemUtils.write_p!(cookie_file, "cookie data")
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "", use_cookies: false)
+
+ refute String.contains?(output, "--cookies #{cookie_file}")
+ end
+
test "doesn't include cookie options when cookies.txt blank", %{cookie_file: cookie_file} do
FilesystemUtils.write_p!(cookie_file, " \n \n ")
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "", use_cookies: true)
refute String.contains?(output, "--cookies")
refute String.contains?(output, cookie_file)
@@ -71,7 +86,7 @@ defmodule Pinchflat.YtDlp.CommandRunnerTest do
test "doesn't include cookie options when cookies.txt doesn't exist", %{cookie_file: cookie_file} do
File.rm(cookie_file)
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
refute String.contains?(output, "--cookies")
refute String.contains?(output, cookie_file)
@@ -81,26 +96,90 @@ defmodule Pinchflat.YtDlp.CommandRunnerTest do
end
end
+ describe "run/4 when testing rate limit options" do
+ test "includes sleep interval options by default" do
+ Settings.set(extractor_sleep_interval_seconds: 5)
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ assert String.contains?(output, "--sleep-interval")
+ assert String.contains?(output, "--sleep-requests")
+ assert String.contains?(output, "--sleep-subtitles")
+ end
+
+ test "doesn't include sleep interval options when skip_sleep_interval is true" do
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "", skip_sleep_interval: true)
+
+ refute String.contains?(output, "--sleep-interval")
+ refute String.contains?(output, "--sleep-requests")
+ refute String.contains?(output, "--sleep-subtitles")
+ end
+
+ test "doesn't include sleep interval options when extractor_sleep_interval_seconds is 0" do
+ Settings.set(extractor_sleep_interval_seconds: 0)
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ refute String.contains?(output, "--sleep-interval")
+ refute String.contains?(output, "--sleep-requests")
+ refute String.contains?(output, "--sleep-subtitles")
+ end
+
+ test "includes limit_rate option when specified" do
+ Settings.set(download_throughput_limit: "100K")
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ assert String.contains?(output, "--limit-rate 100K")
+ end
+
+ test "doesn't include limit_rate option when download_throughput_limit is nil" do
+ Settings.set(download_throughput_limit: nil)
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ refute String.contains?(output, "--limit-rate")
+ end
+ end
+
describe "run/4 when testing global options" do
test "creates windows-safe filenames" do
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
assert String.contains?(output, "--windows-filenames")
end
test "runs quietly" do
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
assert String.contains?(output, "--quiet")
end
test "sets the cache directory" do
- assert {:ok, output} = Runner.run(@media_url, [], "")
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
assert String.contains?(output, "--cache-dir /tmp/test/tmpfiles/yt-dlp-cache")
end
end
+ describe "run/4 when testing misc options" do
+ test "includes --restrict-filenames when enabled" do
+ Settings.set(restrict_filenames: true)
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ assert String.contains?(output, "--restrict-filenames")
+ end
+
+ test "doesn't include --restrict-filenames when disabled" do
+ Settings.set(restrict_filenames: false)
+
+ assert {:ok, output} = Runner.run(@media_url, :foo, [], "")
+
+ refute String.contains?(output, "--restrict-filenames")
+ end
+ end
+
describe "version/0" do
test "adds the version arg" do
assert {:ok, output} = Runner.version()
@@ -109,6 +188,14 @@ defmodule Pinchflat.YtDlp.CommandRunnerTest do
end
end
+ describe "update/0" do
+ test "adds the update arg" do
+ assert {:ok, output} = Runner.update()
+
+ assert String.contains?(output, "--update")
+ end
+ end
+
defp wrap_executable(new_executable, fun) do
Application.put_env(:pinchflat, :yt_dlp_executable, new_executable)
fun.()
diff --git a/test/pinchflat/yt_dlp/media_collection_test.exs b/test/pinchflat/yt_dlp/media_collection_test.exs
index fa69e58..679c20e 100644
--- a/test/pinchflat/yt_dlp/media_collection_test.exs
+++ b/test/pinchflat/yt_dlp/media_collection_test.exs
@@ -10,7 +10,7 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
describe "get_media_attributes_for_collection/2" do
test "returns a list of video attributes with no blank elements" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, source_attributes_return_fixture() <> "\n\n"}
end)
@@ -18,8 +18,8 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
MediaCollection.get_media_attributes_for_collection(@channel_url)
end
- test "it passes the expected default args" do
- expect(YtDlpRunnerMock, :run, fn _url, opts, ot, _addl_opts ->
+ test "passes the expected default args" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, ot, _addl_opts ->
assert opts == [:simulate, :skip_download, :ignore_no_formats_error, :no_warnings]
assert ot == Media.indexing_output_template()
@@ -30,14 +30,27 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
end
test "returns the error straight through when the command fails" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:error, "Big issue", 1} end)
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:error, "Big issue", 1}
+ end)
assert {:error, "Big issue", 1} = MediaCollection.get_media_attributes_for_collection(@channel_url)
end
- test "passes the explict tmpfile path to runner" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, addl_opts ->
- assert [{:output_filepath, filepath}] = addl_opts
+ test "passes long additional command options" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, opts, _ot, _addl_opts ->
+ assert :foo in opts
+
+ {:ok, ""}
+ end)
+
+ assert {:ok, _} = MediaCollection.get_media_attributes_for_collection(@channel_url, [:foo])
+ end
+
+ test "passes additional args to runner" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, addl_opts ->
+ assert [{:output_filepath, filepath} | _] = addl_opts
+ assert {:use_cookies, false} in addl_opts
assert String.ends_with?(filepath, ".json")
{:ok, ""}
@@ -47,7 +60,10 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
end
test "supports an optional file_listener_handler that gets passed a filename" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts -> {:ok, ""} end)
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
+ {:ok, ""}
+ end)
+
current_self = self()
handler = fn filename ->
@@ -55,14 +71,14 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
end
assert {:ok, _} =
- MediaCollection.get_media_attributes_for_collection(@channel_url, file_listener_handler: handler)
+ MediaCollection.get_media_attributes_for_collection(@channel_url, [], file_listener_handler: handler)
assert_receive {:handler, filename}
assert String.ends_with?(filename, ".json")
end
test "gracefully handles partially failed responses" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl_opts ->
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes_for_collection, _opts, _ot, _addl_opts ->
{:ok, "INVALID\n\n" <> source_attributes_return_fixture() <> "\nINVALID\n"}
end)
@@ -72,8 +88,8 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
end
describe "get_source_details/1" do
- test "it returns a map with data on success" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ test "returns a map with data on success" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, _addl_opts ->
Phoenix.json_library().encode(%{
channel: "PinchflatTestChannel",
channel_id: "UCQH2",
@@ -92,8 +108,8 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
} = res
end
- test "it passes the expected args to the backend runner" do
- expect(YtDlpRunnerMock, :run, fn @channel_url, opts, ot ->
+ test "passes the expected args to the runner" do
+ expect(YtDlpRunnerMock, :run, fn @channel_url, :get_source_details, opts, ot, _addl_opts ->
assert opts == [:simulate, :skip_download, :ignore_no_formats_error, playlist_end: 1]
assert ot == "%(.{channel,channel_id,playlist_id,playlist_title,filename})j"
@@ -103,61 +119,99 @@ defmodule Pinchflat.YtDlp.MediaCollectionTest do
assert {:ok, _} = MediaCollection.get_source_details(@channel_url)
end
- test "it returns an error if the runner returns an error" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:error, "Big issue", 1} end)
+ test "passes custom args to the runner" do
+ expect(YtDlpRunnerMock, :run, fn @channel_url, :get_source_details, opts, _ot, _addl_opts ->
+ assert {:foo, :bar} in opts
+
+ {:ok, "{}"}
+ end)
+
+ assert {:ok, _} = MediaCollection.get_source_details(@channel_url, foo: :bar)
+ end
+
+ test "passes additional args to the runner" do
+ expect(YtDlpRunnerMock, :run, fn @channel_url, :get_source_details, _opts, _ot, addl_opts ->
+ assert {:use_cookies, true} in addl_opts
+
+ {:ok, "{}"}
+ end)
+
+ assert {:ok, _} = MediaCollection.get_source_details(@channel_url, [], use_cookies: true)
+ end
+
+ test "returns an error if the runner returns an error" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, _addl_opts -> {:error, "Big issue", 1} end)
assert {:error, "Big issue", 1} = MediaCollection.get_source_details(@channel_url)
end
- test "it returns an error if the output is not JSON" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, "Not JSON"} end)
+ test "returns an error if the output is not JSON" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_details, _opts, _ot, _addl_opts -> {:ok, "Not JSON"} end)
- assert {:error, %Jason.DecodeError{}} = MediaCollection.get_source_details(@channel_url)
+ assert {:error, "Error decoding JSON response"} = MediaCollection.get_source_details(@channel_url)
end
end
describe "get_source_metadata/1" do
- test "it returns a map with data on success" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ test "returns a map with data on success" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_metadata, _opts, _ot, _addl_opts ->
Phoenix.json_library().encode(%{channel: "PinchflatTestChannel"})
end)
- assert {:ok, res} = MediaCollection.get_source_metadata(@channel_url)
+ assert {:ok, res} = MediaCollection.get_source_metadata(@channel_url, playlist_items: 0)
assert %{"channel" => "PinchflatTestChannel"} = res
end
- test "it passes the expected args to the backend runner" do
- expect(YtDlpRunnerMock, :run, fn @channel_url, opts, ot ->
- assert opts == [playlist_items: 0]
+ test "passes the expected args to the backend runner" do
+ expect(YtDlpRunnerMock, :run, fn @channel_url, :get_source_metadata, opts, ot, _addl_opts ->
+ assert opts == [:skip_download, playlist_items: 0]
assert ot == "playlist:%()j"
{:ok, "{}"}
end)
- assert {:ok, _} = MediaCollection.get_source_metadata(@channel_url)
+ assert {:ok, _} = MediaCollection.get_source_metadata(@channel_url, playlist_items: 0)
end
- test "it returns an error if the runner returns an error" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:error, "Big issue", 1} end)
-
- assert {:error, "Big issue", 1} = MediaCollection.get_source_metadata(@channel_url)
- end
-
- test "it returns an error if the output is not JSON" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:ok, "Not JSON"} end)
-
- assert {:error, %Jason.DecodeError{}} = MediaCollection.get_source_metadata(@channel_url)
- end
-
- test "allows you to pass additional opts" do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot ->
- assert opts == [playlist_items: 0, real_opt: :yup]
+ test "passes additional args to the runner" do
+ expect(YtDlpRunnerMock, :run, fn @channel_url, :get_source_metadata, _opts, _ot, addl_opts ->
+ assert {:use_cookies, true} in addl_opts
{:ok, "{}"}
end)
- assert {:ok, _} = MediaCollection.get_source_metadata(@channel_url, real_opt: :yup)
+ assert {:ok, _} = MediaCollection.get_source_metadata(@channel_url, [playlist_items: 0], use_cookies: true)
+ end
+
+ test "passes custom args to the runner" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_metadata, opts, _ot, _addl_opts ->
+ assert opts == [:skip_download, playlist_items: 1, real_opt: :yup]
+
+ {:ok, "{}"}
+ end)
+
+ assert {:ok, _} = MediaCollection.get_source_metadata(@channel_url, playlist_items: 1, real_opt: :yup)
+ end
+
+ test "blows up if you pass addl opts but don't pass playlist items" do
+ assert_raise KeyError, fn ->
+ MediaCollection.get_source_metadata(@channel_url, real_opt: :yup)
+ end
+ end
+
+ test "returns an error if the runner returns an error" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_metadata, _opts, _ot, _addl_opts ->
+ {:error, "Big issue", 1}
+ end)
+
+ assert {:error, "Big issue", 1} = MediaCollection.get_source_metadata(@channel_url, playlist_items: 0)
+ end
+
+ test "returns an error if the output is not JSON" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_source_metadata, _opts, _ot, _addl_opts -> {:ok, "Not JSON"} end)
+
+ assert {:error, %Jason.DecodeError{}} = MediaCollection.get_source_metadata(@channel_url, playlist_items: 0)
end
end
end
diff --git a/test/pinchflat/yt_dlp/media_test.exs b/test/pinchflat/yt_dlp/media_test.exs
index 937d2ca..4f5025d 100644
--- a/test/pinchflat/yt_dlp/media_test.exs
+++ b/test/pinchflat/yt_dlp/media_test.exs
@@ -9,7 +9,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
describe "download/3" do
test "calls the backend runner with the expected arguments" do
- expect(YtDlpRunnerMock, :run, fn @media_url, opts, ot, addl ->
+ expect(YtDlpRunnerMock, :run, fn @media_url, :download, opts, ot, addl ->
assert [:no_simulate] = opts
assert "after_move:%()j" = ot
assert addl == []
@@ -20,19 +20,28 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert {:ok, _} = Media.download(@media_url)
end
- test "passes along additional options" do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot, addl ->
+ test "passes along custom command args" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download, opts, _ot, _addl ->
assert [:no_simulate, :custom_arg] = opts
+
+ {:ok, "{}"}
+ end)
+
+ assert {:ok, _} = Media.download(@media_url, [:custom_arg])
+ end
+
+ test "passes along additional options" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download, _opts, _ot, addl ->
assert [addl_arg: true] = addl
{:ok, "{}"}
end)
- assert {:ok, _} = Media.download(@media_url, [:custom_arg], addl_arg: true)
+ assert {:ok, _} = Media.download(@media_url, [], addl_arg: true)
end
test "parses and returns the generated file as JSON" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot, _addl ->
+ expect(YtDlpRunnerMock, :run, fn _url, :download, _opts, _ot, _addl ->
{:ok, render_metadata(:media_metadata)}
end)
@@ -41,7 +50,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
end
test "returns errors" do
- expect(YtDlpRunnerMock, :run, fn _url, _opt, _ot, _addl ->
+ expect(YtDlpRunnerMock, :run, fn _url, :download, _opt, _ot, _addl ->
{:error, "something"}
end)
@@ -49,9 +58,77 @@ defmodule Pinchflat.YtDlp.MediaTest do
end
end
+ describe "get_downloadable_status/1" do
+ test "returns :downloadable if the media was never live" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "not_live"})}
+ end)
+
+ assert {:ok, :downloadable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns :downloadable if the media was live and has been processed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "was_live"})}
+ end)
+
+ assert {:ok, :downloadable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns :downloadable if the media's live_status is nil" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => nil})}
+ end)
+
+ assert {:ok, :downloadable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns :ignorable if the media is currently live" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "is_live"})}
+ end)
+
+ assert {:ok, :ignorable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns :ignorable if the media is scheduled to be live" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "is_upcoming"})}
+ end)
+
+ assert {:ok, :ignorable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns :ignorable if the media was live but hasn't been processed" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "post_live"})}
+ end)
+
+ assert {:ok, :ignorable} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "returns an error if the downloadable status can't be determined" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, _addl ->
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "what_tha"})}
+ end)
+
+ assert {:error, "Unknown live status: what_tha"} = Media.get_downloadable_status(@media_url)
+ end
+
+ test "optionally accepts additional args" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_downloadable_status, _opts, _ot, addl ->
+ assert [addl_arg: true] = addl
+
+ {:ok, Phoenix.json_library().encode!(%{"live_status" => "not_live"})}
+ end)
+
+ assert {:ok, :downloadable} = Media.get_downloadable_status(@media_url, addl_arg: true)
+ end
+ end
+
describe "download_thumbnail/2" do
test "calls the backend runner with the expected arguments" do
- expect(YtDlpRunnerMock, :run, fn @media_url, opts, ot ->
+ expect(YtDlpRunnerMock, :run, fn @media_url, :download_thumbnail, opts, ot, _addl ->
assert opts == [:no_simulate, :skip_download, :write_thumbnail, {:convert_thumbnail, "jpg"}]
assert ot == "after_move:%()j"
@@ -61,8 +138,8 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert {:ok, _} = Media.download_thumbnail(@media_url)
end
- test "passes along additional options" do
- expect(YtDlpRunnerMock, :run, fn _url, opts, _ot ->
+ test "passes along custom command args" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, opts, _ot, _addl ->
assert :custom_arg in opts
{:ok, "{}"}
@@ -71,8 +148,18 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert {:ok, _} = Media.download_thumbnail(@media_url, [:custom_arg])
end
+ test "passes along additional options" do
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opts, _ot, addl ->
+ assert [addl_arg: true] = addl
+
+ {:ok, "{}"}
+ end)
+
+ assert {:ok, _} = Media.download_thumbnail(@media_url, [], addl_arg: true)
+ end
+
test "returns errors" do
- expect(YtDlpRunnerMock, :run, fn _url, _opt, _ot ->
+ expect(YtDlpRunnerMock, :run, fn _url, :download_thumbnail, _opt, _ot, _addl ->
{:error, "something"}
end)
@@ -82,7 +169,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
describe "get_media_attributes/1" do
test "returns a list of video attributes" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot ->
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl ->
{:ok, media_attributes_return_fixture()}
end)
@@ -91,7 +178,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
end
test "it passes the expected default args" do
- expect(YtDlpRunnerMock, :run, fn _url, opts, ot ->
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, opts, ot, _addl ->
assert opts == [:simulate, :skip_download]
assert ot == Media.indexing_output_template()
@@ -101,8 +188,26 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert {:ok, _} = Media.get_media_attributes(@media_url)
end
+ test "passes along additional command options" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, opts, _ot, _addl ->
+ assert [:simulate, :skip_download, :custom_arg] = opts
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ assert {:ok, _} = Media.get_media_attributes(@media_url, [:custom_arg])
+ end
+
+ test "passes along additional options" do
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, addl ->
+ assert [addl_arg: true] = addl
+ {:ok, media_attributes_return_fixture()}
+ end)
+
+ assert {:ok, _} = Media.get_media_attributes(@media_url, [], addl_arg: true)
+ end
+
test "returns the error straight through when the command fails" do
- expect(YtDlpRunnerMock, :run, fn _url, _opts, _ot -> {:error, "Big issue", 1} end)
+ expect(YtDlpRunnerMock, :run, fn _url, :get_media_attributes, _opts, _ot, _addl -> {:error, "Big issue", 1} end)
assert {:error, "Big issue", 1} = Media.get_media_attributes(@media_url)
end
@@ -110,8 +215,12 @@ defmodule Pinchflat.YtDlp.MediaTest do
describe "indexing_output_template/0" do
test "contains all the greatest hits" do
- assert "%(.{id,title,was_live,webpage_url,description,aspect_ratio,duration,upload_date,timestamp})j" ==
- Media.indexing_output_template()
+ attrs =
+ ~w(id title live_status original_url description aspect_ratio duration upload_date timestamp playlist_index filename)a
+
+ formatted_attrs = "%(.{#{Enum.join(attrs, ",")}})j"
+
+ assert formatted_attrs == Media.indexing_output_template()
end
end
@@ -121,12 +230,14 @@ defmodule Pinchflat.YtDlp.MediaTest do
"id" => "TiZPUDkDYbk",
"title" => "Trying to Wheelie Without the Rear Brake",
"description" => "I'm not sure what I expected.",
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
- "was_live" => false,
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "live_status" => "not_live",
"aspect_ratio" => 1.0,
"duration" => 60,
"upload_date" => "20210101",
- "timestamp" => 1_600_000_000
+ "timestamp" => 1_600_000_000,
+ "playlist_index" => 1,
+ "filename" => "TiZPUDkDYbk.mp4"
}
assert %Media{
@@ -137,13 +248,15 @@ defmodule Pinchflat.YtDlp.MediaTest do
livestream: false,
short_form_content: false,
uploaded_at: ~U[2020-09-13 12:26:40Z],
- duration_seconds: 60
+ duration_seconds: 60,
+ playlist_index: 1,
+ predicted_media_filepath: "TiZPUDkDYbk.mp4"
} == Media.response_to_struct(response)
end
test "sets short_form_content to true if the URL contains /shorts/" do
response = %{
- "webpage_url" => "https://www.youtube.com/shorts/TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/shorts/TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => "20210101"
@@ -154,9 +267,9 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "sets short_form_content to true if the aspect ratio are duration are right" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 0.5,
- "duration" => 59,
+ "duration" => 150,
"upload_date" => "20210101"
}
@@ -165,7 +278,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "sets short_form_content to false otherwise" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => "20210101"
@@ -176,7 +289,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "doesn't blow up if short form content-related fields are missing" do
response = %{
- "webpage_url" => nil,
+ "original_url" => nil,
"aspect_ratio" => nil,
"duration" => nil,
"upload_date" => "20210101"
@@ -187,7 +300,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "parses the duration" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 60.4,
"upload_date" => "20210101"
@@ -198,7 +311,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "doesn't blow up if duration is missing" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => nil,
"upload_date" => "20210101"
@@ -207,9 +320,9 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert %Media{duration_seconds: nil} = Media.response_to_struct(response)
end
- test "sets livestream to false if the was_live field isn't present" do
+ test "sets livestream to false if the live_status field isn't present" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 60,
"upload_date" => "20210101"
@@ -217,12 +330,23 @@ defmodule Pinchflat.YtDlp.MediaTest do
assert %Media{livestream: false} = Media.response_to_struct(response)
end
+
+ test "doesn't blow up if playlist_index is missing" do
+ response = %{
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "aspect_ratio" => 1.0,
+ "duration" => nil,
+ "upload_date" => "20210101"
+ }
+
+ assert %Media{playlist_index: 0} = Media.response_to_struct(response)
+ end
end
describe "response_to_struct/1 when testing uploaded_at" do
test "parses the upload date from the timestamp if present" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => "20210101",
@@ -236,7 +360,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "parses the upload date from the uploaded_at if timestamp is present but nil" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => "20210101",
@@ -250,7 +374,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "parses the upload date from the uploaded_at if timestamp absent" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => "20210101"
@@ -263,7 +387,7 @@ defmodule Pinchflat.YtDlp.MediaTest do
test "doesn't blow up if upload date is missing" do
response = %{
- "webpage_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
+ "original_url" => "https://www.youtube.com/watch?v=TiZPUDkDYbk",
"aspect_ratio" => 1.0,
"duration" => 61,
"upload_date" => nil
diff --git a/test/pinchflat/yt_dlp/update_worker_test.exs b/test/pinchflat/yt_dlp/update_worker_test.exs
new file mode 100644
index 0000000..fed0510
--- /dev/null
+++ b/test/pinchflat/yt_dlp/update_worker_test.exs
@@ -0,0 +1,24 @@
+defmodule Pinchflat.YtDlp.UpdateWorkerTest do
+ use Pinchflat.DataCase
+
+ alias Pinchflat.Settings
+ alias Pinchflat.YtDlp.UpdateWorker
+
+ describe "perform/1" do
+ test "calls the yt-dlp runner to update yt-dlp" do
+ expect(YtDlpRunnerMock, :update, fn -> {:ok, ""} end)
+ expect(YtDlpRunnerMock, :version, fn -> {:ok, ""} end)
+
+ perform_job(UpdateWorker, %{})
+ end
+
+ test "saves the new version to the database" do
+ expect(YtDlpRunnerMock, :update, fn -> {:ok, ""} end)
+ expect(YtDlpRunnerMock, :version, fn -> {:ok, "1.2.3"} end)
+
+ perform_job(UpdateWorker, %{})
+
+ assert {:ok, "1.2.3"} = Settings.get(:yt_dlp_version)
+ end
+ end
+end
diff --git a/test/pinchflat_web/controllers/error_html_test.exs b/test/pinchflat_web/controllers/error_html_test.exs
index d9baab3..89bb46a 100644
--- a/test/pinchflat_web/controllers/error_html_test.exs
+++ b/test/pinchflat_web/controllers/error_html_test.exs
@@ -5,10 +5,10 @@ defmodule PinchflatWeb.ErrorHTMLTest do
import Phoenix.Template
test "renders 404.html" do
- assert render_to_string(PinchflatWeb.ErrorHTML, "404", "html", []) == "Not Found"
+ assert render_to_string(PinchflatWeb.ErrorHTML, "404", "html", []) =~ "404 (not found)"
end
test "renders 500.html" do
- assert render_to_string(PinchflatWeb.ErrorHTML, "500", "html", []) == "Internal Server Error"
+ assert render_to_string(PinchflatWeb.ErrorHTML, "500", "html", []) =~ "Internal Server Error"
end
end
diff --git a/test/pinchflat_web/controllers/media_item_controller_test.exs b/test/pinchflat_web/controllers/media_item_controller_test.exs
index 4070938..34457d5 100644
--- a/test/pinchflat_web/controllers/media_item_controller_test.exs
+++ b/test/pinchflat_web/controllers/media_item_controller_test.exs
@@ -56,7 +56,7 @@ defmodule PinchflatWeb.MediaItemControllerTest do
describe "delete media" do
setup do
media_item = media_item_with_attachments()
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
%{media_item: media_item}
end
diff --git a/test/pinchflat_web/controllers/media_profile_controller_test.exs b/test/pinchflat_web/controllers/media_profile_controller_test.exs
index af174ca..2c3dcfc 100644
--- a/test/pinchflat_web/controllers/media_profile_controller_test.exs
+++ b/test/pinchflat_web/controllers/media_profile_controller_test.exs
@@ -1,12 +1,11 @@
defmodule PinchflatWeb.MediaProfileControllerTest do
use PinchflatWeb.ConnCase
- import Pinchflat.MediaFixtures
- import Pinchflat.SourcesFixtures
import Pinchflat.ProfilesFixtures
alias Pinchflat.Repo
alias Pinchflat.Settings
+ alias Pinchflat.Profiles.MediaProfileDeletionWorker
@create_attrs %{name: "some name", output_path_template: "output_template.{{ ext }}"}
@update_attrs %{
@@ -23,8 +22,17 @@ defmodule PinchflatWeb.MediaProfileControllerTest do
describe "index" do
test "lists all media_profiles", %{conn: conn} do
+ profile = media_profile_fixture()
conn = get(conn, ~p"/media_profiles")
+
assert html_response(conn, 200) =~ "Media Profiles"
+ assert html_response(conn, 200) =~ profile.name
+ end
+
+ test "omits profiles that have marked_for_deletion_at set", %{conn: conn} do
+ profile = media_profile_fixture(marked_for_deletion_at: DateTime.utc_now())
+ conn = get(conn, ~p"/media_profiles")
+ refute html_response(conn, 200) =~ profile.name
end
end
@@ -71,6 +79,15 @@ defmodule PinchflatWeb.MediaProfileControllerTest do
refute html_response(conn, 200) =~ "MENU"
end
+
+ test "preloads some attributes when using a template", %{conn: conn} do
+ profile = media_profile_fixture(name: "My first profile", download_subs: true, sub_langs: "de")
+
+ conn = get(conn, ~p"/media_profiles/new", %{"template_id" => profile.id})
+ assert html_response(conn, 200) =~ "New Media Profile"
+ assert html_response(conn, 200) =~ profile.sub_langs
+ refute html_response(conn, 200) =~ profile.name
+ end
end
describe "edit media_profile" do
@@ -102,34 +119,28 @@ defmodule PinchflatWeb.MediaProfileControllerTest do
end
end
- describe "delete media_profile when just deleting the records" do
+ describe "delete media_profile in all cases" do
setup [:create_media_profile]
- test "deletes chosen media_profile and its associations", %{conn: conn, media_profile: media_profile} do
- source = source_fixture(media_profile_id: media_profile.id)
- media_item = media_item_with_attachments(%{source_id: source.id})
-
- conn = delete(conn, ~p"/media_profiles/#{media_profile}")
- assert redirected_to(conn) == ~p"/media_profiles"
-
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_profile) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
- end
-
test "redirects to the media_profiles page", %{conn: conn, media_profile: media_profile} do
conn = delete(conn, ~p"/media_profiles/#{media_profile}")
assert redirected_to(conn) == ~p"/media_profiles"
end
- test "doesn't delete any files", %{conn: conn, media_profile: media_profile} do
- source = source_fixture(media_profile_id: media_profile.id)
- media_item = media_item_with_attachments(%{source_id: source.id})
+ test "sets marked_for_deletion_at", %{conn: conn, media_profile: media_profile} do
+ delete(conn, ~p"/media_profiles/#{media_profile}")
+ assert Repo.reload!(media_profile).marked_for_deletion_at
+ end
+ end
+ describe "delete media_profile when just deleting the records" do
+ setup [:create_media_profile]
+
+ test "enqueues a job without the delete_files arg", %{conn: conn, media_profile: media_profile} do
delete(conn, ~p"/media_profiles/#{media_profile}")
- assert File.exists?(media_item.media_filepath)
+ assert [%{args: %{"delete_files" => false}}] = all_enqueued(worker: MediaProfileDeletionWorker)
end
end
@@ -137,36 +148,15 @@ defmodule PinchflatWeb.MediaProfileControllerTest do
setup [:create_media_profile]
setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
+ stub(UserScriptRunnerMock, :run, fn _event_type, _data -> {:ok, "", 0} end)
:ok
end
- test "deletes chosen media_profile and its associations", %{conn: conn, media_profile: media_profile} do
- source = source_fixture(media_profile_id: media_profile.id)
- media_item = media_item_with_attachments(%{source_id: source.id})
-
- conn = delete(conn, ~p"/media_profiles/#{media_profile}?delete_files=true")
- assert redirected_to(conn) == ~p"/media_profiles"
-
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_profile) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
- end
-
- test "redirects to the media_profiles page", %{conn: conn, media_profile: media_profile} do
- conn = delete(conn, ~p"/media_profiles/#{media_profile}?delete_files=true")
-
- assert redirected_to(conn) == ~p"/media_profiles"
- end
-
- test "deletes the files", %{conn: conn, media_profile: media_profile} do
- source = source_fixture(media_profile_id: media_profile.id)
- media_item = media_item_with_attachments(%{source_id: source.id})
-
+ test "enqueues a job with the delete_files arg", %{conn: conn, media_profile: media_profile} do
delete(conn, ~p"/media_profiles/#{media_profile}?delete_files=true")
- refute File.exists?(media_item.media_filepath)
+ assert [%{args: %{"delete_files" => true}}] = all_enqueued(worker: MediaProfileDeletionWorker)
end
end
diff --git a/test/pinchflat_web/controllers/pages/job_table_live_test.exs b/test/pinchflat_web/controllers/pages/job_table_live_test.exs
index 9e62b4e..32ddfce 100644
--- a/test/pinchflat_web/controllers/pages/job_table_live_test.exs
+++ b/test/pinchflat_web/controllers/pages/job_table_live_test.exs
@@ -6,7 +6,6 @@ defmodule PinchflatWeb.Pages.JobTableLiveTest do
import Pinchflat.MediaFixtures
import Pinchflat.SourcesFixtures
- alias Pinchflat.Utils.StringUtils
alias Pinchflat.Pages.JobTableLive
alias Pinchflat.Downloading.MediaDownloadWorker
alias Pinchflat.FastIndexing.FastIndexingWorker
@@ -48,7 +47,7 @@ defmodule PinchflatWeb.Pages.JobTableLiveTest do
{_source, media_item, _task, _job} = create_media_item_job()
{:ok, _view, html} = live_isolated(conn, JobTableLive, session: %{})
- assert html =~ StringUtils.truncate(media_item.title, 35)
+ assert html =~ media_item.title
end
test "shows a media item link", %{conn: conn} do
@@ -62,7 +61,7 @@ defmodule PinchflatWeb.Pages.JobTableLiveTest do
{source, _task, _job} = create_source_job()
{:ok, _view, html} = live_isolated(conn, JobTableLive, session: %{})
- assert html =~ StringUtils.truncate(source.custom_name, 35)
+ assert html =~ source.custom_name
end
test "shows a source link", %{conn: conn} do
diff --git a/test/pinchflat_web/controllers/podcast_controller_test.exs b/test/pinchflat_web/controllers/podcast_controller_test.exs
index 676ab54..91d31ed 100644
--- a/test/pinchflat_web/controllers/podcast_controller_test.exs
+++ b/test/pinchflat_web/controllers/podcast_controller_test.exs
@@ -4,6 +4,37 @@ defmodule PinchflatWeb.PodcastControllerTest do
import Pinchflat.MediaFixtures
import Pinchflat.SourcesFixtures
+ alias Pinchflat.Settings
+
+ describe "opml_feed" do
+ test "renders the XML document", %{conn: conn} do
+ source = source_fixture()
+ route_token = Settings.get!(:route_token)
+
+ conn = get(conn, ~p"/sources/opml.xml?#{[route_token: route_token]}")
+
+ assert conn.status == 200
+ assert {"content-type", "application/opml+xml; charset=utf-8"} in conn.resp_headers
+ assert {"content-disposition", "inline"} in conn.resp_headers
+ assert conn.resp_body =~ ~s"http://www.example.com/sources/#{source.uuid}/feed.xml"
+ assert conn.resp_body =~ "text=\"#{source.custom_name}\""
+ end
+
+ test "returns 401 if the route token is incorrect", %{conn: conn} do
+ conn = get(conn, ~p"/sources/opml.xml?route_token=incorrect")
+
+ assert conn.status == 401
+ assert conn.resp_body == "Unauthorized"
+ end
+
+ test "returns 401 if the route token is missing", %{conn: conn} do
+ conn = get(conn, ~p"/sources/opml.xml")
+
+ assert conn.status == 401
+ assert conn.resp_body == "Unauthorized"
+ end
+ end
+
describe "rss_feed" do
test "renders the XML document", %{conn: conn} do
source = source_fixture()
diff --git a/test/pinchflat_web/controllers/source_controller_test.exs b/test/pinchflat_web/controllers/source_controller_test.exs
index b9d6882..8580d23 100644
--- a/test/pinchflat_web/controllers/source_controller_test.exs
+++ b/test/pinchflat_web/controllers/source_controller_test.exs
@@ -7,6 +7,8 @@ defmodule PinchflatWeb.SourceControllerTest do
alias Pinchflat.Repo
alias Pinchflat.Settings
+ alias Pinchflat.Media.FileSyncingWorker
+ alias Pinchflat.Sources.SourceDeletionWorker
alias Pinchflat.Downloading.MediaDownloadWorker
alias Pinchflat.Metadata.SourceMetadataStorageWorker
alias Pinchflat.SlowIndexing.MediaCollectionIndexingWorker
@@ -32,7 +34,8 @@ defmodule PinchflatWeb.SourceControllerTest do
end
describe "index" do
- test "lists all sources", %{conn: conn} do
+ # Most of the tests are in `index_table_list_test.exs`
+ test "returns 200", %{conn: conn} do
conn = get(conn, ~p"/sources")
assert html_response(conn, 200) =~ "Sources"
end
@@ -50,11 +53,20 @@ defmodule PinchflatWeb.SourceControllerTest do
refute html_response(conn, 200) =~ "MENU"
end
+
+ test "preloads some attributes when using a template", %{conn: conn} do
+ source = source_fixture(custom_name: "My first source", download_cutoff_date: "2021-01-01")
+
+ conn = get(conn, ~p"/sources/new", %{"template_id" => source.id})
+ assert html_response(conn, 200) =~ "New Source"
+ assert html_response(conn, 200) =~ "2021-01-01"
+ refute html_response(conn, 200) =~ source.custom_name
+ end
end
describe "create source" do
test "redirects to show when data is valid", %{conn: conn, create_attrs: create_attrs} do
- expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/3)
+ expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/5)
conn = post(conn, ~p"/sources", source: create_attrs)
assert %{id: id} = redirected_params(conn)
@@ -70,7 +82,7 @@ defmodule PinchflatWeb.SourceControllerTest do
end
test "redirects to onboarding when onboarding", %{conn: conn, create_attrs: create_attrs} do
- expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/3)
+ expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/5)
Settings.set(onboarding: true)
conn = post(conn, ~p"/sources", source: create_attrs)
@@ -99,7 +111,7 @@ defmodule PinchflatWeb.SourceControllerTest do
setup [:create_source]
test "redirects when data is valid", %{conn: conn, source: source, update_attrs: update_attrs} do
- expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/3)
+ expect(YtDlpRunnerMock, :run, 1, &runner_function_mock/5)
conn = put(conn, ~p"/sources/#{source}", source: update_attrs)
assert redirected_to(conn) == ~p"/sources/#{source}"
@@ -118,51 +130,37 @@ defmodule PinchflatWeb.SourceControllerTest do
end
end
- describe "delete source when just deleting the records" do
+ describe "delete source in all cases" do
setup [:create_source]
- test "deletes chosen source and media_items", %{conn: conn, source: source, media_item: media_item} do
- delete(conn, ~p"/sources/#{source}")
-
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
- end
-
test "redirects to the sources page", %{conn: conn, source: source} do
conn = delete(conn, ~p"/sources/#{source}")
assert redirected_to(conn) == ~p"/sources"
end
- test "does not delete the files", %{conn: conn, source: source, media_item: media_item} do
+ test "sets marked_for_deletion_at", %{conn: conn, source: source} do
delete(conn, ~p"/sources/#{source}")
- assert File.exists?(media_item.media_filepath)
+ assert Repo.reload!(source).marked_for_deletion_at
+ end
+ end
+
+ describe "delete source when just deleting the records" do
+ setup [:create_source]
+
+ test "enqueues a job without the delete_files arg", %{conn: conn, source: source} do
+ delete(conn, ~p"/sources/#{source}")
+
+ assert [%{args: %{"delete_files" => false}}] = all_enqueued(worker: SourceDeletionWorker)
end
end
describe "delete source when deleting the records and files" do
setup [:create_source]
- setup do
- stub(UserScriptRunnerMock, :run, fn _event_type, _data -> :ok end)
-
- :ok
- end
-
- test "deletes chosen source and media_items", %{conn: conn, source: source, media_item: media_item} do
+ test "enqueues a job with the delete_files arg", %{conn: conn, source: source} do
delete(conn, ~p"/sources/#{source}?delete_files=true")
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(source) end
- assert_raise Ecto.NoResultsError, fn -> Repo.reload!(media_item) end
- end
-
- test "redirects to the sources page", %{conn: conn, source: source} do
- conn = delete(conn, ~p"/sources/#{source}?delete_files=true")
- assert redirected_to(conn) == ~p"/sources"
- end
-
- test "deletes the files", %{conn: conn, source: source, media_item: media_item} do
- delete(conn, ~p"/sources/#{source}?delete_files=true")
- refute File.exists?(media_item.media_filepath)
+ assert [%{args: %{"delete_files" => true}}] = all_enqueued(worker: SourceDeletionWorker)
end
end
@@ -254,6 +252,23 @@ defmodule PinchflatWeb.SourceControllerTest do
end
end
+ describe "sync_files_on_disk" do
+ test "forces a file sync", %{conn: conn} do
+ source = source_fixture()
+
+ assert [] = all_enqueued(worker: FileSyncingWorker)
+ post(conn, ~p"/sources/#{source.id}/sync_files_on_disk")
+ assert [_] = all_enqueued(worker: FileSyncingWorker)
+ end
+
+ test "redirects to the source page", %{conn: conn} do
+ source = source_fixture()
+
+ conn = post(conn, ~p"/sources/#{source.id}/sync_files_on_disk")
+ assert redirected_to(conn) == ~p"/sources/#{source.id}"
+ end
+ end
+
defp create_source(_) do
source = source_fixture()
media_item = media_item_with_attachments(%{source_id: source.id})
@@ -261,7 +276,7 @@ defmodule PinchflatWeb.SourceControllerTest do
%{source: source, media_item: media_item}
end
- defp runner_function_mock(_url, _opts, _ot) do
+ defp runner_function_mock(_url, :get_source_details, _opts, _ot, _addl) do
{
:ok,
Phoenix.json_library().encode!(%{
diff --git a/test/pinchflat_web/controllers/sources/index_table_live_test.exs b/test/pinchflat_web/controllers/sources/index_table_live_test.exs
new file mode 100644
index 0000000..c53380d
--- /dev/null
+++ b/test/pinchflat_web/controllers/sources/index_table_live_test.exs
@@ -0,0 +1,140 @@
+defmodule PinchflatWeb.Sources.SourceLive.IndexTableLiveTest do
+ use PinchflatWeb.ConnCase
+
+ import Phoenix.LiveViewTest
+ import Pinchflat.SourcesFixtures
+ import Pinchflat.ProfilesFixtures
+
+ alias Pinchflat.Sources.Source
+ alias PinchflatWeb.Sources.SourceLive.IndexTableLive
+
+ describe "initial rendering" do
+ test "lists all sources", %{conn: conn} do
+ source = source_fixture()
+
+ {:ok, _view, html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ assert html =~ source.custom_name
+ end
+
+ test "omits sources that have marked_for_deletion_at set", %{conn: conn} do
+ source = source_fixture(marked_for_deletion_at: DateTime.utc_now())
+
+ {:ok, _view, html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ refute html =~ source.custom_name
+ end
+
+ test "omits sources whose media profile has marked_for_deletion_at set", %{conn: conn} do
+ media_profile = media_profile_fixture(marked_for_deletion_at: DateTime.utc_now())
+ source = source_fixture(media_profile_id: media_profile.id)
+
+ {:ok, _view, html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ refute html =~ source.custom_name
+ end
+ end
+
+ describe "when testing sorting" do
+ test "sorts by the custom_name by default", %{conn: conn} do
+ source1 = source_fixture(custom_name: "Source_B")
+ source2 = source_fixture(custom_name: "Source_A")
+
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: create_session())
+ assert render_element(view, "tbody tr:first-child") =~ source2.custom_name
+ assert render_element(view, "tbody tr:last-child") =~ source1.custom_name
+ end
+
+ test "clicking the row will change the sort direction", %{conn: conn} do
+ source1 = source_fixture(custom_name: "Source_B")
+ source2 = source_fixture(custom_name: "Source_A")
+
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ # Click the row to change the sort direction
+ click_element(view, "th", "Name")
+
+ assert render_element(view, "tbody tr:first-child") =~ source1.custom_name
+ assert render_element(view, "tbody tr:last-child") =~ source2.custom_name
+ end
+
+ test "clicking a different row will sort by that attribute", %{conn: conn} do
+ source1 = source_fixture(custom_name: "Source_A", enabled: true)
+ source2 = source_fixture(custom_name: "Source_A", enabled: false)
+
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ # Click the row to change the sort field
+ click_element(view, "th", "Enabled?")
+
+ assert render_element(view, "tbody tr:first-child") =~ source2.custom_name
+ assert render_element(view, "tbody tr:last-child") =~ source1.custom_name
+
+ # Click the row to again change the sort direction
+ click_element(view, "th", "Enabled?")
+ assert render_element(view, "tbody tr:first-child") =~ source1.custom_name
+ assert render_element(view, "tbody tr:last-child") =~ source2.custom_name
+ end
+
+ test "name is sorted without case sensitivity", %{conn: conn} do
+ source1 = source_fixture(custom_name: "Source_B")
+ source2 = source_fixture(custom_name: "source_a")
+
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ assert render_element(view, "tbody tr:first-child") =~ source2.custom_name
+ assert render_element(view, "tbody tr:last-child") =~ source1.custom_name
+ end
+ end
+
+ describe "when testing pagination" do
+ test "moving to the next page loads new records", %{conn: conn} do
+ source1 = source_fixture(custom_name: "Source_A")
+ source2 = source_fixture(custom_name: "Source_B")
+
+ session = Map.merge(create_session(), %{"results_per_page" => 1})
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: session)
+
+ assert render_element(view, "tbody") =~ source1.custom_name
+ refute render_element(view, "tbody") =~ source2.custom_name
+
+ click_element(view, "span.pagination-next")
+
+ refute render_element(view, "tbody") =~ source1.custom_name
+ assert render_element(view, "tbody") =~ source2.custom_name
+ end
+ end
+
+ describe "when testing the enable toggle" do
+ test "updates the source's enabled status", %{conn: conn} do
+ source = source_fixture(enabled: true)
+ {:ok, view, _html} = live_isolated(conn, IndexTableLive, session: create_session())
+
+ view
+ |> element(".enabled_toggle_form")
+ |> render_change(%{source: %{"enabled" => false}})
+
+ assert %{enabled: false} = Repo.get!(Source, source.id)
+ end
+ end
+
+ defp click_element(view, selector, text_filter \\ nil) do
+ view
+ |> element(selector, text_filter)
+ |> render_click()
+ end
+
+ defp render_element(view, selector) do
+ view
+ |> element(selector)
+ |> render()
+ end
+
+ defp create_session do
+ %{
+ "initial_sort_key" => :custom_name,
+ "initial_sort_direction" => :asc,
+ "results_per_page" => 10
+ }
+ end
+end
diff --git a/test/pinchflat_web/controllers/sources/media_item_table_live_test.exs b/test/pinchflat_web/controllers/sources/media_item_table_live_test.exs
index 772e305..b941f84 100644
--- a/test/pinchflat_web/controllers/sources/media_item_table_live_test.exs
+++ b/test/pinchflat_web/controllers/sources/media_item_table_live_test.exs
@@ -6,7 +6,7 @@ defmodule PinchflatWeb.Sources.MediaItemTableLiveTest do
import Pinchflat.SourcesFixtures
import Pinchflat.ProfilesFixtures
- alias Pinchflat.Sources.MediaItemTableLive
+ alias PinchflatWeb.Sources.MediaItemTableLive
setup do
source = source_fixture()
diff --git a/test/pinchflat_web/controllers/sources/source_enable_toggle_test.exs b/test/pinchflat_web/controllers/sources/source_enable_toggle_test.exs
new file mode 100644
index 0000000..e1abd1d
--- /dev/null
+++ b/test/pinchflat_web/controllers/sources/source_enable_toggle_test.exs
@@ -0,0 +1,26 @@
+defmodule PinchflatWeb.Sources.SourceLive.SourceEnableToggleTest do
+ use PinchflatWeb.ConnCase
+
+ import Phoenix.LiveViewTest
+
+ alias PinchflatWeb.Sources.SourceLive.SourceEnableToggle
+
+ describe "initial rendering" do
+ test "renders a toggle in the on position if the source is enabled" do
+ source = %{id: 1, enabled: true}
+
+ html = render_component(SourceEnableToggle, %{id: :foo, source: source})
+
+ # This is checking the Alpine attrs which is a good-enough proxy for the toggle position
+ assert html =~ "{ enabled: true }"
+ end
+
+ test "renders a toggle in the off position if the source is disabled" do
+ source = %{id: 1, enabled: false}
+
+ html = render_component(SourceEnableToggle, %{id: :foo, source: source})
+
+ assert html =~ "{ enabled: false }"
+ end
+ end
+end
diff --git a/test/pinchflat_web/helpers/pagination_helpers_test.exs b/test/pinchflat_web/helpers/pagination_helpers_test.exs
new file mode 100644
index 0000000..c15a2d3
--- /dev/null
+++ b/test/pinchflat_web/helpers/pagination_helpers_test.exs
@@ -0,0 +1,96 @@
+defmodule PinchflatWeb.Helpers.PaginationHelpersTest do
+ use Pinchflat.DataCase
+ import Pinchflat.SourcesFixtures
+
+ alias Pinchflat.Sources.Source
+ alias PinchflatWeb.Helpers.PaginationHelpers
+
+ describe "get_pagination_attributes/3" do
+ test "returns the correct pagination attributes" do
+ source_fixture()
+ query = from(s in Source, select: s.id)
+ page = 1
+ records_per_page = 10
+
+ pagination_attributes = PaginationHelpers.get_pagination_attributes(query, page, records_per_page)
+
+ assert pagination_attributes.page == 1
+ assert pagination_attributes.total_pages == 1
+ assert pagination_attributes.total_record_count == 1
+ assert pagination_attributes.limit == 10
+ assert pagination_attributes.offset == 0
+ end
+
+ test "returns the correct pagination attributes when there are multiple pages" do
+ source_fixture()
+ source_fixture()
+
+ query = from(s in Source, select: s.id)
+ page = 1
+ records_per_page = 1
+
+ pagination_attributes = PaginationHelpers.get_pagination_attributes(query, page, records_per_page)
+
+ assert pagination_attributes.page == 1
+ assert pagination_attributes.total_pages == 2
+ assert pagination_attributes.total_record_count == 2
+ assert pagination_attributes.limit == 1
+ assert pagination_attributes.offset == 0
+ end
+
+ test "returns the correct attributes when on a page other than the first" do
+ source_fixture()
+ source_fixture()
+
+ query = from(s in Source, select: s.id)
+ page = 2
+ records_per_page = 1
+
+ pagination_attributes = PaginationHelpers.get_pagination_attributes(query, page, records_per_page)
+
+ assert pagination_attributes.page == 2
+ assert pagination_attributes.total_pages == 2
+ assert pagination_attributes.total_record_count == 2
+ assert pagination_attributes.limit == 1
+ assert pagination_attributes.offset == 1
+ end
+ end
+
+ describe "update_page_number/3" do
+ test "increments the page number" do
+ current_page = 1
+ total_pages = 2
+
+ updated_page = PaginationHelpers.update_page_number(current_page, :inc, total_pages)
+
+ assert updated_page == 2
+ end
+
+ test "decrements the page number" do
+ current_page = 2
+ total_pages = 2
+
+ updated_page = PaginationHelpers.update_page_number(current_page, :dec, total_pages)
+
+ assert updated_page == 1
+ end
+
+ test "doesn't overflow the page number" do
+ current_page = 2
+ total_pages = 2
+
+ updated_page = PaginationHelpers.update_page_number(current_page, :inc, total_pages)
+
+ assert updated_page == 2
+ end
+
+ test "doesn't underflow the page number" do
+ current_page = 1
+ total_pages = 2
+
+ updated_page = PaginationHelpers.update_page_number(current_page, :dec, total_pages)
+
+ assert updated_page == 1
+ end
+ end
+end
diff --git a/test/pinchflat_web/helpers/sorting_helpers_test.exs b/test/pinchflat_web/helpers/sorting_helpers_test.exs
new file mode 100644
index 0000000..7f1d81b
--- /dev/null
+++ b/test/pinchflat_web/helpers/sorting_helpers_test.exs
@@ -0,0 +1,31 @@
+defmodule PinchflatWeb.Helpers.SortingHelpersTest do
+ use Pinchflat.DataCase
+
+ alias PinchflatWeb.Helpers.SortingHelpers
+
+ describe "get_sort_direction/3" do
+ test "returns the correct sort direction when the new sort attribute is the same as the old sort attribute" do
+ old_sort_attr = "name"
+ new_sort_attr = "name"
+ old_sort_direction = :desc
+
+ assert SortingHelpers.get_sort_direction(old_sort_attr, new_sort_attr, old_sort_direction) == :asc
+ end
+
+ test "returns the correct sort direction when the new sort attribute is the same as the old sort attribute in the other direction" do
+ old_sort_attr = "name"
+ new_sort_attr = "name"
+ old_sort_direction = :asc
+
+ assert SortingHelpers.get_sort_direction(old_sort_attr, new_sort_attr, old_sort_direction) == :desc
+ end
+
+ test "returns the correct sort direction when the new sort attribute is different from the old sort attribute" do
+ old_sort_attr = "name"
+ new_sort_attr = "date"
+ old_sort_direction = :asc
+
+ assert SortingHelpers.get_sort_direction(old_sort_attr, new_sort_attr, old_sort_direction) == :asc
+ end
+ end
+end
diff --git a/test/pinchflat_web/plugs_test.exs b/test/pinchflat_web/plugs_test.exs
new file mode 100644
index 0000000..9a02eba
--- /dev/null
+++ b/test/pinchflat_web/plugs_test.exs
@@ -0,0 +1,166 @@
+defmodule PinchflatWeb.PlugsTest do
+ use PinchflatWeb.ConnCase
+
+ alias PinchflatWeb.Plugs
+ alias Pinchflat.Settings
+
+ describe "maybe_basic_auth/2" do
+ setup do
+ old_username = Application.get_env(:pinchflat, :basic_auth_username)
+ old_password = Application.get_env(:pinchflat, :basic_auth_password)
+ old_expose_feed_endpoints = Application.get_env(:pinchflat, :expose_feed_endpoints)
+
+ on_exit(fn ->
+ Application.put_env(:pinchflat, :basic_auth_username, old_username)
+ Application.put_env(:pinchflat, :basic_auth_password, old_password)
+ Application.put_env(:pinchflat, :expose_feed_endpoints, old_expose_feed_endpoints)
+ end)
+
+ :ok
+ end
+
+ test "uses basic auth when expose_feed_endpoints is false" do
+ Application.put_env(:pinchflat, :basic_auth_username, "user")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+ Application.put_env(:pinchflat, :expose_feed_endpoints, false)
+
+ conn = Plugs.maybe_basic_auth(build_conn(), [])
+
+ assert conn.status == 401
+ assert {"www-authenticate", "Basic realm=\"Pinchflat\""} in conn.resp_headers
+ end
+
+ test "supplying the correct username and password allows access" do
+ Application.put_env(:pinchflat, :basic_auth_username, "user")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+ Application.put_env(:pinchflat, :expose_feed_endpoints, false)
+
+ encoded_auth = Plug.BasicAuth.encode_basic_auth("user", "pass")
+
+ conn =
+ build_conn()
+ |> put_req_header("authorization", encoded_auth)
+ |> Plugs.maybe_basic_auth([])
+
+ # nil here means the response is unset, but that's good. It just means we're moving to the next stage
+ assert conn.status == nil
+ end
+
+ test "does not use basic auth when expose_feed_endpoints is true" do
+ Application.put_env(:pinchflat, :basic_auth_username, "user")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+ Application.put_env(:pinchflat, :expose_feed_endpoints, true)
+
+ conn = Plugs.maybe_basic_auth(build_conn(), [])
+
+ assert conn.status == nil
+ end
+
+ test "does not use basic auth when username/password aren't set" do
+ Application.put_env(:pinchflat, :basic_auth_username, nil)
+ Application.put_env(:pinchflat, :basic_auth_password, nil)
+ Application.put_env(:pinchflat, :expose_feed_endpoints, false)
+
+ conn = Plugs.maybe_basic_auth(build_conn(), [])
+
+ # nil here means the response is unset, but that's good. It just means we're moving to the next stage
+ assert conn.status == nil
+ end
+ end
+
+ describe "basic_auth/2" do
+ setup do
+ old_username = Application.get_env(:pinchflat, :basic_auth_username)
+ old_password = Application.get_env(:pinchflat, :basic_auth_password)
+
+ on_exit(fn ->
+ Application.put_env(:pinchflat, :basic_auth_username, old_username)
+ Application.put_env(:pinchflat, :basic_auth_password, old_password)
+ end)
+
+ :ok
+ end
+
+ test "uses basic auth when both username and password are set", %{conn: conn} do
+ Application.put_env(:pinchflat, :basic_auth_username, "user")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+
+ conn = Plugs.basic_auth(conn, [])
+
+ assert conn.status == 401
+ assert {"www-authenticate", "Basic realm=\"Pinchflat\""} in conn.resp_headers
+ end
+
+ test "providing the username and password allows access", %{conn: conn} do
+ Application.put_env(:pinchflat, :basic_auth_username, "user")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+
+ conn =
+ conn
+ |> put_req_header("authorization", Plug.BasicAuth.encode_basic_auth("user", "pass"))
+ |> Plugs.basic_auth([])
+
+ # nil here means the response is unset, but that's good. It just means we're moving to the next stage
+ assert conn.status == nil
+ end
+
+ test "does not use basic auth when either username or password is not set", %{conn: conn} do
+ Application.put_env(:pinchflat, :basic_auth_username, nil)
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+
+ conn = Plugs.basic_auth(conn, [])
+
+ assert conn.status == nil
+ end
+
+ test "treats empty strings as not being set when using basic auth", %{conn: conn} do
+ Application.put_env(:pinchflat, :basic_auth_username, "")
+ Application.put_env(:pinchflat, :basic_auth_password, "pass")
+
+ conn = Plugs.basic_auth(conn, [])
+
+ assert conn.status == nil
+ end
+ end
+
+ describe "allow_iframe_embed/2" do
+ test "deletes the x-frame-options header", %{conn: conn} do
+ conn = put_resp_header(conn, "x-frame-options", "DENY")
+ assert ["DENY"] = get_resp_header(conn, "x-frame-options")
+
+ conn = Plugs.allow_iframe_embed(conn, [])
+
+ assert [] = get_resp_header(conn, "x-frame-options")
+ end
+ end
+
+ describe "token_protected_route/2" do
+ test "allows access when the route token is correct", %{conn: conn} do
+ route_token = Settings.get!(:route_token)
+ conn = %{conn | query_params: %{"route_token" => route_token}}
+
+ conn = Plugs.token_protected_route(conn, [])
+
+ # nil here means the response is unset, but that's good. It just means we're moving to the next stage
+ assert conn.status == nil
+ end
+
+ test "does not allow access when the route token is incorrect", %{conn: conn} do
+ conn = %{conn | query_params: %{"route_token" => "incorrect"}}
+
+ conn = Plugs.token_protected_route(conn, [])
+
+ assert conn.status == 401
+ assert conn.resp_body == "Unauthorized"
+ end
+
+ test "does not allow access when the route token is missing", %{conn: conn} do
+ conn = %{conn | query_params: %{}}
+
+ conn = Plugs.token_protected_route(conn, [])
+
+ assert conn.status == 401
+ assert conn.resp_body == "Unauthorized"
+ end
+ end
+end
diff --git a/test/pinchflat_web/routing_test.exs b/test/pinchflat_web/routing_test.exs
deleted file mode 100644
index cbe4dd8..0000000
--- a/test/pinchflat_web/routing_test.exs
+++ /dev/null
@@ -1,108 +0,0 @@
-defmodule PinchflatWeb.RoutingTest do
- use PinchflatWeb.ConnCase
-
- import Pinchflat.SourcesFixtures
-
- describe "basic_auth plug" do
- setup do
- old_username = Application.get_env(:pinchflat, :basic_auth_username)
- old_password = Application.get_env(:pinchflat, :basic_auth_password)
-
- on_exit(fn ->
- Application.put_env(:pinchflat, :basic_auth_username, old_username)
- Application.put_env(:pinchflat, :basic_auth_password, old_password)
- end)
-
- :ok
- end
-
- test "it uses basic auth when both username and password are set", %{conn: conn} do
- Application.put_env(:pinchflat, :basic_auth_username, "user")
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
-
- conn = get(conn, "/")
-
- assert conn.status == 401
- assert {"www-authenticate", "Basic realm=\"Pinchflat\""} in conn.resp_headers
- end
-
- test "providing the username and password allows access", %{conn: conn} do
- Application.put_env(:pinchflat, :basic_auth_username, "user")
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
-
- conn =
- conn
- |> put_req_header("authorization", Plug.BasicAuth.encode_basic_auth("user", "pass"))
- |> get("/")
-
- assert conn.status == 200
- end
-
- test "it does not use basic auth when either username or password is not set", %{conn: conn} do
- Application.put_env(:pinchflat, :basic_auth_username, nil)
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
-
- conn = get(conn, "/")
-
- assert conn.status == 200
- end
-
- test "it treats empty strings as not being set when using basic auth", %{conn: conn} do
- Application.put_env(:pinchflat, :basic_auth_username, "")
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
-
- conn = get(conn, "/")
-
- assert conn.status == 200
- end
- end
-
- describe "maybe_basic_auth plug" do
- setup do
- old_username = Application.get_env(:pinchflat, :basic_auth_username)
- old_password = Application.get_env(:pinchflat, :basic_auth_password)
- old_expose_feed_endpoints = Application.get_env(:pinchflat, :expose_feed_endpoints)
-
- source = source_fixture()
-
- on_exit(fn ->
- Application.put_env(:pinchflat, :basic_auth_username, old_username)
- Application.put_env(:pinchflat, :basic_auth_password, old_password)
- Application.put_env(:pinchflat, :expose_feed_endpoints, old_expose_feed_endpoints)
- end)
-
- {:ok, source: source}
- end
-
- test "uses basic auth when expose_feed_endpoints is false", %{source: source} do
- Application.put_env(:pinchflat, :basic_auth_username, "user")
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
- Application.put_env(:pinchflat, :expose_feed_endpoints, false)
-
- conn = get(build_conn(), "/sources/#{source.uuid}/feed")
-
- assert conn.status == 401
- assert {"www-authenticate", "Basic realm=\"Pinchflat\""} in conn.resp_headers
- end
-
- test "does not use basic auth when expose_feed_endpoints is true", %{source: source} do
- Application.put_env(:pinchflat, :basic_auth_username, "user")
- Application.put_env(:pinchflat, :basic_auth_password, "pass")
- Application.put_env(:pinchflat, :expose_feed_endpoints, true)
-
- conn = get(build_conn(), "/sources/#{source.uuid}/feed")
-
- assert conn.status == 200
- end
-
- test "does not use basic auth when username/password aren't set", %{source: source} do
- Application.put_env(:pinchflat, :basic_auth_username, nil)
- Application.put_env(:pinchflat, :basic_auth_password, nil)
- Application.put_env(:pinchflat, :expose_feed_endpoints, false)
-
- conn = get(build_conn(), "/sources/#{source.uuid}/feed")
-
- assert conn.status == 200
- end
- end
-end
diff --git a/test/support/files/subtitle.srt b/test/support/files/subtitle.srt
new file mode 100644
index 0000000..3251437
--- /dev/null
+++ b/test/support/files/subtitle.srt
@@ -0,0 +1,36 @@
+1
+00:00:00,000 --> 00:00:02,500
+Welcome to the Example Subtitle File!
+
+2
+00:00:03,000 --> 00:00:06,000
+This is a demonstration of SRT subtitles.
+
+3
+00:00:07,000 --> 00:00:10,500
+You can use SRT files to add subtitles to your videos.
+
+4
+00:00:12,000 --> 00:00:15,000
+Each subtitle entry consists of a number, a timecode,
+and the subtitle text.
+
+5
+00:00:16,000 --> 00:00:20,000
+The timecode format is hours:minutes:seconds,milliseconds.
+
+6
+00:00:21,000 --> 00:00:25,000
+You can adjust the timing to match your video.
+
+7
+00:00:26,000 --> 00:00:30,000
+Make sure the subtitle text is clear and readable.
+
+8
+00:00:31,000 --> 00:00:35,000
+And that's how you create an SRT subtitle file!
+
+9
+00:00:36,000 --> 00:00:40,000
+Enjoy adding subtitles to your videos!
diff --git a/test/support/fixtures/media_fixtures.ex b/test/support/fixtures/media_fixtures.ex
index c78a09a..51fd5fd 100644
--- a/test/support/fixtures/media_fixtures.ex
+++ b/test/support/fixtures/media_fixtures.ex
@@ -73,16 +73,19 @@ defmodule Pinchflat.MediaFixtures do
"#{:rand.uniform(1_000_000)}"
])
- stored_media_filepath = Path.join(base_dir, "#media.mp4")
+ stored_media_filepath = Path.join(base_dir, "media.mp4")
thumbnail_filepath = Path.join(base_dir, "thumbnail.jpg")
+ subtitle_filepath = Path.join(base_dir, "subtitle.en.srt")
FilesystemUtils.cp_p!(media_filepath_fixture(), stored_media_filepath)
FilesystemUtils.cp_p!(thumbnail_filepath_fixture(), thumbnail_filepath)
+ FilesystemUtils.cp_p!(subtitle_filepath_fixture(), subtitle_filepath)
merged_attrs =
Map.merge(attrs, %{
media_filepath: stored_media_filepath,
- thumbnail_filepath: thumbnail_filepath
+ thumbnail_filepath: thumbnail_filepath,
+ subtitle_filepaths: [["en", subtitle_filepath]]
})
media_item_fixture(merged_attrs)
@@ -92,8 +95,8 @@ defmodule Pinchflat.MediaFixtures do
media_attributes = %{
id: "video1",
title: "Video 1",
- webpage_url: "https://example.com/video1",
- was_live: false,
+ original_url: "https://example.com/video1",
+ live_status: "not_live",
description: "desc1",
aspect_ratio: 1.67,
duration: 123.45,
@@ -124,6 +127,16 @@ defmodule Pinchflat.MediaFixtures do
])
end
+ def subtitle_filepath_fixture do
+ Path.join([
+ File.cwd!(),
+ "test",
+ "support",
+ "files",
+ "subtitle.srt"
+ ])
+ end
+
def infojson_filepath_fixture do
Path.join([
File.cwd!(),
diff --git a/test/support/fixtures/sources_fixtures.ex b/test/support/fixtures/sources_fixtures.ex
index a06329a..9d699dc 100644
--- a/test/support/fixtures/sources_fixtures.ex
+++ b/test/support/fixtures/sources_fixtures.ex
@@ -20,6 +20,7 @@ defmodule Pinchflat.SourcesFixtures do
Enum.into(
attrs,
%{
+ enabled: true,
collection_name: "Source ##{:rand.uniform(1_000_000)}",
collection_id: Base.encode16(:crypto.hash(:md5, "#{:rand.uniform(1_000_000)}")),
collection_type: "channel",
@@ -80,8 +81,8 @@ defmodule Pinchflat.SourcesFixtures do
%{
id: "video1",
title: "Video 1",
- webpage_url: "https://example.com/video1",
- was_live: false,
+ original_url: "https://example.com/video1",
+ live_status: "not_live",
description: "desc1",
aspect_ratio: 1.67,
duration: 12.34,
@@ -90,8 +91,8 @@ defmodule Pinchflat.SourcesFixtures do
%{
id: "video2",
title: "Video 2",
- webpage_url: "https://example.com/video2",
- was_live: true,
+ original_url: "https://example.com/video2",
+ live_status: "is_live",
description: "desc2",
aspect_ratio: 1.67,
duration: 345.67,
@@ -100,8 +101,8 @@ defmodule Pinchflat.SourcesFixtures do
%{
id: "video3",
title: "Video 3",
- webpage_url: "https://example.com/video3",
- was_live: false,
+ original_url: "https://example.com/video3",
+ live_status: "not_live",
description: "desc3",
aspect_ratio: 1.0,
duration: 678.90,
diff --git a/test/support/scripts/yt-dlp-mocks/101_exit_code.sh b/test/support/scripts/yt-dlp-mocks/101_exit_code.sh
new file mode 100755
index 0000000..7c38957
--- /dev/null
+++ b/test/support/scripts/yt-dlp-mocks/101_exit_code.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+exit 101
diff --git a/tooling/.check.exs b/tooling/.check.exs
index 98ca0e2..c0d38c3 100644
--- a/tooling/.check.exs
+++ b/tooling/.check.exs
@@ -17,7 +17,9 @@
{:formatter, env: %{"MIX_ENV" => "test"}},
{:sobelow, "mix sobelow --config"},
{:prettier_formatting, "yarn run lint:check", fix: "yarn run lint:fix"},
- {:npm_test, false}
+ {:npm_test, false},
+ {:gettext, false},
+ {:ex_unit, env: %{"MIX_ENV" => "test", "EX_CHECK" => "1"}}
## curated tools may be disabled (e.g. the check for compilation warnings)
# {:compiler, false},
diff --git a/tooling/.prettierignore b/tooling/.prettierignore
deleted file mode 100644
index 937ae7c..0000000
--- a/tooling/.prettierignore
+++ /dev/null
@@ -1 +0,0 @@
-../assets/vendor/
diff --git a/tooling/version_bump.sh b/tooling/version_bump.sh
new file mode 100755
index 0000000..21a97ef
--- /dev/null
+++ b/tooling/version_bump.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+# Get the current date in the format YYYY.MM.DD (stripping leading zeros)
+DATE=$(date +"%Y.%-m.%-d")
+
+# Get the current version from mix.exs
+VERSION=$(grep "version: " mix.exs | cut -d '"' -f2)
+
+echo "Bumping version from $VERSION to $DATE"
+# Replace the version in mix.exs with the new version
+sed -i "s/version: \"$VERSION\"/version: \"$DATE\"/g" mix.exs
+
+# Run checks to ensure it's a valid mix.exs file
+mix check
+
+echo "Version bumped successfully to $DATE"