Skip to content

Commit

Permalink
Webui revamp (#280)
Browse files Browse the repository at this point in the history
  • Loading branch information
mgdigital authored Oct 14, 2024
1 parent 3063686 commit 309e3b8
Show file tree
Hide file tree
Showing 445 changed files with 39,400 additions and 12,481 deletions.
12 changes: 10 additions & 2 deletions .github/workflows/checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@ jobs:
- name: Run linters
run: |
nix develop --ignore-environment --keep HOME . --command task lint
# Adding golang-ci lint as a separate step as the Nix package is currently broken
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
with:
version: v1.61
args: --timeout=10m

test:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -65,13 +71,15 @@ jobs:
- uses: cachix/install-nix-action@v26
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: Install web UI, apply database migrations, generate code and build web app
- name: Install web UI, apply database migrations, generate code, extract translations and build web app
run: |
nix develop --ignore-environment --command task install-webui
nix develop --ignore-environment --keep HOME --keep POSTGRES_PASSWORD . --command task migrate
nix develop --ignore-environment --keep HOME --keep POSTGRES_PASSWORD . --command task gen
nix develop --ignore-environment . --command task i18n-extract
nix develop --ignore-environment . --command task build-webui
env:
POSTGRES_PASSWORD: postgres
- name: Check nothing changed
run: git diff --exit-code
# excluding the 3rdpartylicenses file in a horrible hack:
run: git diff --exit-code -- . ':(exclude)webui/dist/bitmagnet/3rdpartylicenses.txt'
2 changes: 2 additions & 0 deletions .prettierignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
bitmagnet.io/schemas/**/*.*
webui/dist/**/*.*
webui/src/app/graphql/generated/**/*.*
webui/src/app/i18n/translations/*.json
webui/.angular
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM golang:alpine3.18 AS build
FROM golang:1.23.2-alpine3.20 AS build

RUN apk --update add \
gcc \
Expand All @@ -13,7 +13,7 @@ WORKDIR /build

RUN go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$(git describe --tags --always --dirty)"

FROM alpine:3.18
FROM alpine:3.20

RUN apk --update add \
curl \
Expand Down
13 changes: 10 additions & 3 deletions Taskfile.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,12 +46,19 @@ tasks:
- go run . classifier schema --format json > ./bitmagnet.io/schemas/classifier-0.1.json

gen-webui-graphql:
dir: ./webui
cmds:
- npm run graphql:codegen

i18n-extract:
dir: ./webui
cmds:
- cd webui && npm run graphql:codegen
- npm run i18n:extract

lint:
cmds:
- task lint-golangci
# Removing golang-ci lint as the Nix package is currently broken
# - task lint-golangci
- task lint-webui
- task lint-prettier

Expand Down Expand Up @@ -93,7 +100,7 @@ tasks:
build-webui:
dir: ./webui
cmds:
- npm run build -- -c embedded
- npm run build

build-docsite:
dir: ./bitmagnet.io
Expand Down
4 changes: 2 additions & 2 deletions bitmagnet.io/Gemfile
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
source 'https://rubygems.org'

gem "just-the-docs", "~> 0.6"
gem "just-the-docs", "~> 0.10"
gem "jekyll", "~> 4.3"
gem "jekyll-redirect-from", "~> 0.16"
gem "jekyll-seo-tag"
gem "jekyll-target-blank", "~> 2.0"
gem "kramdown", "~> 2.3"
gem "kramdown", "~> 2.4"
gem "kramdown-parser-gfm", "~> 1.1"
gem "webrick", "~> 1.8"
63 changes: 34 additions & 29 deletions bitmagnet.io/Gemfile.lock
Original file line number Diff line number Diff line change
@@ -1,22 +1,28 @@
GEM
remote: https://rubygems.org/
specs:
addressable (2.8.5)
public_suffix (>= 2.0.2, < 6.0)
addressable (2.8.7)
public_suffix (>= 2.0.2, < 7.0)
bigdecimal (3.1.8)
colorator (1.1.0)
concurrent-ruby (1.2.2)
concurrent-ruby (1.3.4)
em-websocket (0.5.3)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
eventmachine (1.2.7)
ffi (1.16.2)
ffi (1.17.0-arm64-darwin)
ffi (1.17.0-x86_64-linux-gnu)
forwardable-extended (2.6.0)
google-protobuf (3.24.3-arm64-darwin)
google-protobuf (3.24.3-x86_64-linux)
google-protobuf (4.28.2-arm64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.28.2-x86_64-linux)
bigdecimal
rake (>= 13)
http_parser.rb (0.8.0)
i18n (1.14.1)
i18n (1.14.6)
concurrent-ruby (~> 1.0)
jekyll (4.3.2)
jekyll (4.3.4)
addressable (~> 2.4)
colorator (~> 1.0)
em-websocket (~> 0.5)
Expand Down Expand Up @@ -45,7 +51,7 @@ GEM
nokogiri (~> 1.10)
jekyll-watch (2.2.1)
listen (~> 3.0)
just-the-docs (0.6.2)
just-the-docs (0.10.0)
jekyll (>= 3.8.5)
jekyll-include-cache
jekyll-seo-tag (>= 2.0)
Expand All @@ -55,35 +61,34 @@ GEM
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
liquid (4.0.4)
listen (3.8.0)
listen (3.9.0)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
mercenary (0.4.0)
nokogiri (1.16.5-arm64-darwin)
nokogiri (1.16.7-arm64-darwin)
racc (~> 1.4)
nokogiri (1.16.5-x86_64-linux)
nokogiri (1.16.7-x86_64-linux)
racc (~> 1.4)
pathutil (0.16.2)
forwardable-extended (~> 2.6)
public_suffix (5.0.3)
racc (1.7.3)
rake (13.0.6)
public_suffix (6.0.1)
racc (1.8.1)
rake (13.2.1)
rb-fsevent (0.11.2)
rb-inotify (0.10.1)
rb-inotify (0.11.1)
ffi (~> 1.0)
rexml (3.2.8)
strscan (>= 3.0.9)
rouge (4.1.3)
rexml (3.3.8)
rouge (4.4.0)
safe_yaml (1.0.5)
sass-embedded (1.68.0-arm64-darwin)
google-protobuf (~> 3.23)
sass-embedded (1.68.0-x86_64-linux-gnu)
google-protobuf (~> 3.23)
strscan (3.1.0)
sass-embedded (1.79.5)
google-protobuf (~> 4.27)
rake (>= 13)
sass-embedded (1.79.5-arm64-darwin)
google-protobuf (~> 4.27)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
unicode-display_width (2.4.2)
webrick (1.8.1)
unicode-display_width (2.6.0)
webrick (1.8.2)

PLATFORMS
arm64-darwin-22
Expand All @@ -95,10 +100,10 @@ DEPENDENCIES
jekyll-redirect-from (~> 0.16)
jekyll-seo-tag
jekyll-target-blank (~> 2.0)
just-the-docs (~> 0.6)
kramdown (~> 2.3)
just-the-docs (~> 0.10)
kramdown (~> 2.4)
kramdown-parser-gfm (~> 1.1)
webrick (~> 1.8)

BUNDLED WITH
2.5.7
2.5.9
23 changes: 11 additions & 12 deletions bitmagnet.io/faq.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,22 +24,17 @@ As a rough guide, you should allow around 300MB RAM for BitMagnet, and at least

## I've started **bitmagnet** for the first time and am not seeing torrents right away, is something wrong?

If everything is working, **bitmagnet** should begin showing torrents in the web UI within a maximum of 10 minutes (which is its cache TTL). The round blue refresh button in the web UI is a cache buster - use it to see new torrent content in real time. Bear in mind that when a torrent is inserted into the database, a background queue job must run before it will become available in the UI. If you're importing thousands or millions of torrents, it might therefore take a while for everything to show. Check the next question if you're still not seeing torrents.
If everything is working, **bitmagnet** should begin showing torrents in the web UI within a maximum of 10 minutes (which is its cache TTL). The refresh button at the top of the torrent listing is a cache buster - use it to see new torrent content in real time. Bear in mind that when a torrent is inserted into the database, a background queue job must run before it will become available in the UI. If you're importing thousands or millions of torrents, it might therefore take a while for everything to show. Check the next question if you're still not seeing torrents.

## **bitmagnet** isn't finding any new torrents, what's wrong?

{: .highlight }
If **bitmagnet** isn't finding new torrents, it probably isn't due to a problem with the software - many people are using it successfully. You may have a networking or firewall issue, or a VPN misconfiguration preventing you from connecting to the DHT. Additionally, the TMDB API is blocked in certain countries; if you are in an affected country you may need to either disable the TMDB integration with the `tmdb.enabled` configuration key, or use a VPN.
If **bitmagnet** isn't finding new torrents, it probably isn't due to a problem with the software - many people are using it successfully. You may have a networking or firewall issue, or a VPN misconfiguration preventing you from connecting to the DHT. Additionally, the TMDB API is blocked in certain countries; if you are in an affected country you may need to either disable the TMDB integration with the `tmdb.enabled` configuration key, or use a VPN. Configuring a personal TMDB API key (or disabling TMDB) will make the queue run a **lot** faster.

Here are some things to check if you're not seeing any new torrents:
**bitmagnet** now shows its health status in the main toolbar: It will show a tick for healthy, a cross for unhealthy, or sometimes 3 dots for pending. Click on it to open the health dialog and check that all workers are running and healthy. The dashboard can be used to monitor queue throughput. On the queues dashboard, the following would indicate a problem:

- Press the round blue refresh button in the UI.
- Visit the metrics endpoint at `/metrics` and check the following metrics:
- `bitmagnet_dht_crawler_persisted_total`: If you see a positive number for this, the DHT crawler is working and has found torrents.
- If torrents are being persisted but you still don't see them in the UI, then check:`bitmagnet_queue_jobs_total{queue="process_torrent",status="processed"}`: If you see a positive number here, then the queue worker is running and processing jobs. If you see `status="failed"` or `status="retry"`, but no `status="processed"`, then something is wrong.
- If no torrents are being persisted, check: `bitmagnet_dht_server_query_success_total` and `bitmagnet_dht_server_query_error_total`. Having some DHT query errors is completely normal, but if you see no successful queries then something is wrong.
- If any of the above metrics are missing, you can assume their value is zero.
- If the metrics confirm a problem, check the logs for errors.
- A high number of pending jobs, and the number of processed jobs not increasing over time
- A high number of failed jobs
- No new jobs being created over time

## Why doesn't **bitmagnet** show me exactly how many torrents it has indexed?

Expand All @@ -51,7 +46,7 @@ This will depend on a number of factors, including your hardware and network con

## How can I see exactly how many torrents **bitmagnet** has crawled in the current session?

Visit the metrics endpoint at `/metrics` and check the metric `bitmagnet_dht_crawler_persisted_total`. `{entity="Torrent"}` corresponds to newly crawled torrents, and `{entity="TorrentsTorrentSource"}` corresponds to torrents that were rediscovered and had their seeders/leechers count, and last-seen-on date updated.
The new dashboard shows throughput of the crawler and job queue. Alternatively, visit the metrics endpoint at `/metrics` and check the metric `bitmagnet_dht_crawler_persisted_total`. `{entity="Torrent"}` corresponds to newly crawled torrents, and `{entity="TorrentsTorrentSource"}` corresponds to torrents that were rediscovered and had their seeders/leechers count, and last-seen-on date updated.

## How are the seeders/leechers numbers determined for torrents crawled from the DHT?

Expand All @@ -69,6 +64,10 @@ No. The DHT crawler works by sampling random info hashes from the network, and w

**bitmagnet** is in early development, and improving the classifier will be an ongoing effort. When new versions are released, you can follow the [reclassify tutorial](/tutorials/reprocess-reclassify.html) to reclassify torrents. If you'd like to [improve or customize the classifier](/guides/classifier.html), this is also possible.

## How can I make **bitmagnet** automatically delete torrents I'm not interested in?

A better question would be: why bother? Disk space is inexpensive in the quantities required by **bitmagnet**, and searching is easier than deleting. Nevertheless this is one of the most commonly asked questions, and it is possible to do this by [customizing the classifier](/guides/classifier.html). Please consider the wastage of resources and load on the network created by deleting what you've crawled. Also remember that the classifier isn't perfect: for example, enabling deletion of XXX content will also delete anything that has been mis-identified as XXX by the classifier, preventing you from finding it in future - for example because it contains a rude word. If you are deleting a large proportion of what you're crawling, you are almost certainly deleting over-zealously and you should consider just using one of the many indexer sites instead.

## Can I run multiple **bitmagnet** instances pointing to the same database?

Yes you can, just point multiple instances to one database and it will work - _but_ it will put more load on the database and cause the app to run slower. An alternative is to run multiple instances with multiple databases, and periodically [merge the databases](/guides/backup-restore-merge.html).
3 changes: 1 addition & 2 deletions bitmagnet.io/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,12 @@ This means that **bitmagnet** is not reliant on any external trackers or torrent
- [x] A GraphQL API: currently this provides a single search query; there is also an embedded GraphQL playground at `/graphql`
- [x] A web user interface implemented in Angular: currently this is a simple single-page application providing a user interface for search queries via the GraphQL API
- [x] [A Torznab-compatible endpoint for integration with the Servarr stack](/guides/servarr-integration.html)
- [x] A WebUI dashboard for monitoring and administration

### High priority features not yet implemented

- [ ] A WebUI dashboard showing things like crawler throughput, task queue, database size etc.
- [ ] Authentication, API keys, access levels etc.
- [ ] An admin API, and in general a more complete GraphQL API
- [ ] A more complete web UI
- [ ] Saved searches for content of particular interest, enabling custom feeds in addition to the following feature
- [ ] Bi-directional integration with the [Prowlarr indexer proxy](https://prowlarr.com/): Currently **bitmagnet** can be added as an indexer in Prowlarr; bi-directional integration would allow **bitmagnet** to crawl content from any indexer configured in Prowlarr, unlocking many new sources of content
- [ ] More documentation and more tests!
Expand Down
14 changes: 14 additions & 0 deletions bitmagnet.io/setup/installation.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,3 +85,17 @@ When referring to CLI commands in the rest of the documentation, for simplicity
```sh
bitmagnet --help
```

## Starting **bitmagnet**

**bitmagnet** runs as multiple worker processes that can be started either individually or all at once. To start all workers, run:

```sh
bitmagnet worker run --all
```

Alternatively, specify individual workers to start:

```sh
bitmagnet worker run --keys=http_server,queue_server,dht_crawler
```
18 changes: 9 additions & 9 deletions flake.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions flake.nix
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
description = "A basic flake with a shell";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
inputs.nixpkgs.url = "github:nixos/nixpkgs/nixos-24.05";
inputs.flake-utils.url = "github:numtide/flake-utils";

outputs = {
Expand All @@ -20,7 +20,7 @@
go-task
golangci-lint
jekyll
nodejs_20
nodejs_22
nodePackages.prettier
protobuf
protoc-gen-go
Expand Down
Loading

0 comments on commit 309e3b8

Please sign in to comment.