From d67a24809b98ccf7a19602e72e19f3e773285f87 Mon Sep 17 00:00:00 2001 From: Alex Ling Date: Sat, 30 Jan 2021 07:39:10 +0000 Subject: [PATCH 01/15] Allow proxy authentication (#141) --- src/config.cr | 1 + src/handlers/auth_handler.cr | 12 +++++++++++- src/util/web.cr | 13 +++++++------ 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/config.cr b/src/config.cr index 3ac5af28..684dfb7e 100644 --- a/src/config.cr +++ b/src/config.cr @@ -22,6 +22,7 @@ class Config property page_margin : Int32 = 30 property disable_login = false property default_username = "" + property auth_proxy_header_name = "" property mangadex = Hash(String, String | Int32).new @[YAML::Field(ignore: true)] diff --git a/src/handlers/auth_handler.cr b/src/handlers/auth_handler.cr index b6891d5c..a8f0087e 100644 --- a/src/handlers/auth_handler.cr +++ b/src/handlers/auth_handler.cr @@ -93,8 +93,18 @@ class AuthHandler < Kemal::Handler call_next env end + def handle_auth_proxy(env) + username = env.request.headers[Config.current.auth_proxy_header_name]? + unless username && Storage.default.username_exists username + return redirect env, "/login" + end + call_next env + end + def call(env) - if request_path_startswith env, ["/opds"] + if !Config.current.auth_proxy_header_name.empty? + handle_auth_proxy env + elsif request_path_startswith env, ["/opds"] handle_opds_auth env else handle_auth env diff --git a/src/util/web.cr b/src/util/web.cr index 03c114d7..1f886a1e 100644 --- a/src/util/web.cr +++ b/src/util/web.cr @@ -3,13 +3,12 @@ # This macro defines `is_admin` when used macro check_admin_access is_admin = false - # The token (if exists) takes precedence over the default user option. - # this is why we check the default username first before checking the - # token. - if Config.current.disable_login - is_admin = Storage.default. - username_is_admin Config.current.default_username + if !Config.current.auth_proxy_header_name.empty? || + Config.current.disable_login + is_admin = Storage.default.username_is_admin get_username env end + + # The token (if exists) takes precedence over other authentication methods. if token = env.session.string? "token" is_admin = Storage.default.verify_admin token end @@ -49,6 +48,8 @@ macro get_username(env) rescue e if Config.current.disable_login Config.current.default_username + elsif (header = Config.current.auth_proxy_header_name) && !header.empty? 
+ env.request.headers[header] else raise e end From 4da263c59453c25f2543cc774b8633b50309ad5f Mon Sep 17 00:00:00 2001 From: Alex Ling Date: Sat, 30 Jan 2021 10:53:14 +0000 Subject: [PATCH 02/15] Rewrite auth_handler Make sure the OPDS pages are accessible without login when login is disabled --- src/handlers/auth_handler.cr | 87 ++++++++++++++++-------------------- src/storage.cr | 8 ---- src/util/web.cr | 11 ++--- 3 files changed, 44 insertions(+), 62 deletions(-) diff --git a/src/handlers/auth_handler.cr b/src/handlers/auth_handler.cr index a8f0087e..692fa8a8 100644 --- a/src/handlers/auth_handler.cr +++ b/src/handlers/auth_handler.cr @@ -15,7 +15,11 @@ class AuthHandler < Kemal::Handler env.response.status_code = 401 env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED env.response.print AUTH_MESSAGE - call_next env + end + + def require_auth(env) + env.session.string "callback", env.request.path + redirect env, "/login" end def validate_token(env) @@ -49,65 +53,50 @@ class AuthHandler < Kemal::Handler Storage.default.verify_user username, password end - def handle_opds_auth(env) - if validate_token(env) || validate_auth_header(env) - call_next env - else - env.response.status_code = 401 - env.response.headers["WWW-Authenticate"] = HEADER_LOGIN_REQUIRED - env.response.print AUTH_MESSAGE - end - end - - def handle_auth(env) + def call(env) + # Skip all authentication if requesting /login, /logout, or a static file if request_path_startswith(env, ["/login", "/logout"]) || requesting_static_file env return call_next(env) end - unless validate_token(env) || Config.current.disable_login - env.session.string "callback", env.request.path - return redirect env, "/login" - end - - if request_path_startswith env, ["/admin", "/api/admin", "/download"] - # The token (if exists) takes precedence over the default user option. - # this is why we check the default username first before checking the - # token. - should_reject = true - if Config.current.disable_login && - Storage.default.username_is_admin Config.current.default_username - should_reject = false + # Check user is logged in + if validate_token env + # Skip if the request has a valid token + elsif Config.current.disable_login + # Check default username if login is disabled + unless Storage.default.username_exists Config.current.default_username + Logger.warn "Default username #{Config.current.default_username} " \ + "does not exist" + return require_auth env end - if env.session.string? "token" - should_reject = !validate_token_admin(env) + elsif !Config.current.auth_proxy_header_name.empty? + # Check auth proxy if present + username = env.request.headers[Config.current.auth_proxy_header_name]? + unless username && Storage.default.username_exists username + Logger.warn "Header #{Config.current.auth_proxy_header_name} unset " \ + "or is not a valid username" + return require_auth env end - if should_reject - env.response.status_code = 403 - send_error_page "HTTP 403: You are not authorized to visit " \ - "#{env.request.path}" - return + elsif request_path_startswith env, ["/opds"] + # Check auth header if requesting an opds page + unless validate_auth_header env + return require_basic_auth env end + else + return require_auth env end - call_next env - end - - def handle_auth_proxy(env) - username = env.request.headers[Config.current.auth_proxy_header_name]? 
- unless username && Storage.default.username_exists username - return redirect env, "/login" + # Check admin access when requesting an admin page + if request_path_startswith env, %w(/admin /api/admin /download) + unless is_admin? env + env.response.status_code = 403 + return send_error_page "HTTP 403: You are not authorized to visit " \ + "#{env.request.path}" + end end - call_next env - end - def call(env) - if !Config.current.auth_proxy_header_name.empty? - handle_auth_proxy env - elsif request_path_startswith env, ["/opds"] - handle_opds_auth env - else - handle_auth env - end + # Let the request go through if it passes the above checks + call_next env end end diff --git a/src/storage.cr b/src/storage.cr index 2bc6daa1..937200fd 100644 --- a/src/storage.cr +++ b/src/storage.cr @@ -48,14 +48,6 @@ class Storage user_count = db.query_one "select count(*) from users", as: Int32 init_admin if init_user && user_count == 0 - - # Verifies that the default username in config is valid - if Config.current.disable_login - username = Config.current.default_username - unless username_exists username - raise "Default username #{username} does not exist" - end - end end unless @auto_close @db = DB.open "sqlite3://#{@path}" diff --git a/src/util/web.cr b/src/util/web.cr index 1f886a1e..67227c71 100644 --- a/src/util/web.cr +++ b/src/util/web.cr @@ -1,10 +1,9 @@ # Web related helper functions/macros -# This macro defines `is_admin` when used -macro check_admin_access +def is_admin?(env) : Bool is_admin = false if !Config.current.auth_proxy_header_name.empty? || - Config.current.disable_login + Config.current.disable_login is_admin = Storage.default.username_is_admin get_username env end @@ -12,11 +11,13 @@ macro check_admin_access if token = env.session.string? "token" is_admin = Storage.default.verify_admin token end + + is_admin end macro layout(name) base_url = Config.current.base_url - check_admin_access + is_admin = is_admin? env begin page = {{name}} render "src/views/#{{{name}}}.html.ecr", "src/views/layout.html.ecr" @@ -31,7 +32,7 @@ end macro send_error_page(msg) message = {{msg}} base_url = Config.current.base_url - check_admin_access + is_admin = is_admin? 
env page = "Error" html = render "src/views/message.html.ecr", "src/views/layout.html.ecr" send_file env, html.to_slice, "text/html" From 70d418d1a158a05868196e9e3aaa57cabf1c39ce Mon Sep 17 00:00:00 2001 From: Alex Ling Date: Sat, 30 Jan 2021 17:08:04 +0000 Subject: [PATCH 03/15] Upgrade to MangaDex API v2 --- public/js/download.js | 7 +- shard.lock | 6 +- shard.yml | 2 + src/assets/lang_codes.csv | 41 ------- src/config.cr | 11 +- src/mangadex/api.cr | 217 ------------------------------------- src/mangadex/downloader.cr | 12 +- src/mangadex/ext.cr | 60 ++++++++++ src/routes/api.cr | 11 +- 9 files changed, 92 insertions(+), 275 deletions(-) delete mode 100644 src/assets/lang_codes.csv delete mode 100644 src/mangadex/api.cr create mode 100644 src/mangadex/ext.cr diff --git a/public/js/download.js b/public/js/download.js index e3d653dd..60aac641 100644 --- a/public/js/download.js +++ b/public/js/download.js @@ -27,7 +27,7 @@ const download = () => { $('#download-btn').attr('hidden', ''); $('#download-spinner').removeAttr('hidden'); const ids = selected.map((i, e) => { - return $(e).find('td').first().text(); + return parseInt($(e).find('td').first().text()); }).get(); const chapters = globalChapters.filter(c => ids.indexOf(c.id) >= 0); console.log(ids); @@ -114,8 +114,7 @@ const search = () => { return; } - const cover = baseURL + data.cover_url; - $('#cover').attr("src", cover); + $('#cover').attr("src", data.mainCover); $('#title').text("Title: " + data.title); $('#artist').text("Artist: " + data.artist); $('#author').text("Author: " + data.author); @@ -285,7 +284,7 @@ const buildTable = () => { ${group_str} ${chp.volume} ${chp.chapter} - ${moment.unix(chp.time).fromNow()} + ${moment.unix(chp.timestamp).fromNow()} `; }).join(''); const tbody = `${inner}`; diff --git a/shard.lock b/shard.lock index 99d3c5af..bd355b2c 100644 --- a/shard.lock +++ b/shard.lock @@ -52,9 +52,13 @@ shards: git: https://github.com/hkalexling/koa.git version: 0.5.0 + mangadex: + git: https://github.com/hkalexling/mangadex.git + version: 0.4.0+git.commit.0c2eb69f46d8e2d0ecca3b5ed088dca36a1b5308 + mg: git: https://github.com/hkalexling/mg.git - version: 0.2.0+git.commit.171c46489d991a8353818e00fc6a3c4e0809ded9 + version: 0.3.0+git.commit.a19417abf03eece80039f89569926cff1ce3a1a3 myhtml: git: https://github.com/kostya/myhtml.git diff --git a/shard.yml b/shard.yml index d7505a5a..97bbc5bc 100644 --- a/shard.yml +++ b/shard.yml @@ -43,3 +43,5 @@ dependencies: github: epoch/tallboy mg: github: hkalexling/mg + mangadex: + github: hkalexling/mangadex diff --git a/src/assets/lang_codes.csv b/src/assets/lang_codes.csv deleted file mode 100644 index 035e8805..00000000 --- a/src/assets/lang_codes.csv +++ /dev/null @@ -1,41 +0,0 @@ -Arabic,sa -Bengali,bd -Bulgarian,bg -Burmese,mm -Catalan,ct -Chinese (Simp),cn -Chinese (Trad),hk -Czech,cz -Danish,dk -Dutch,nl -English,gb -Filipino,ph -Finnish,fi -French,fr -German,de -Greek,gr -Hebrew,il -Hindi,in -Hungarian,hu -Indonesian,id -Italian,it -Japanese,jp -Korean,kr -Lithuanian,lt -Malay,my -Mongolian,mn -Other, -Persian,ir -Polish,pl -Portuguese (Br),br -Portuguese (Pt),pt -Romanian,ro -Russian,ru -Serbo-Croatian,rs -Spanish (Es),es -Spanish (LATAM),mx -Swedish,se -Thai,th -Turkish,tr -Ukrainian,ua -Vietnames,vn diff --git a/src/config.cr b/src/config.cr index 3ac5af28..f8746fbd 100644 --- a/src/config.cr +++ b/src/config.cr @@ -27,7 +27,7 @@ class Config @[YAML::Field(ignore: true)] @mangadex_defaults = { "base_url" => "https://mangadex.org", - "api_url" => 
"https://mangadex.org/api", + "api_url" => "https://mangadex.org/api/v2", "download_wait_seconds" => 5, "download_retries" => 4, "download_queue_db_path" => File.expand_path("~/mango/queue.db", @@ -91,5 +91,14 @@ class Config raise "Login is disabled, but default username is not set. " \ "Please set a default username" end + unless mangadex["api_url"] =~ /\/v2/ + # `Logger.default` is not available yet + Log.setup :debug + Log.warn { "It looks like you are using the deprecated MangaDex API " \ + "v1 in your config file. Please update it to either " \ + "https://mangadex.org/api/v2 or " \ + "https://api.mangadex.org/v2 to suppress this warning." } + mangadex["api_url"] = "https://mangadex.org/api/v2" + end end end diff --git a/src/mangadex/api.cr b/src/mangadex/api.cr deleted file mode 100644 index b521a271..00000000 --- a/src/mangadex/api.cr +++ /dev/null @@ -1,217 +0,0 @@ -require "json" -require "csv" -require "../rename" - -macro string_properties(names) - {% for name in names %} - property {{name.id}} = "" - {% end %} -end - -macro parse_strings_from_json(names) - {% for name in names %} - @{{name.id}} = obj[{{name}}].as_s - {% end %} -end - -macro properties_to_hash(names) - { - {% for name in names %} - "{{name.id}}" => @{{name.id}}.to_s, - {% end %} - } -end - -module MangaDex - class Chapter - string_properties ["lang_code", "title", "volume", "chapter"] - property manga : Manga - property time = Time.local - property id : String - property full_title = "" - property language = "" - property pages = [] of {String, String} # filename, url - property groups = [] of {Int32, String} # group_id, group_name - - def initialize(@id, json_obj : JSON::Any, @manga, - lang : Hash(String, String)) - self.parse_json json_obj, lang - end - - def to_info_json - JSON.build do |json| - json.object do - {% for name in ["id", "title", "volume", "chapter", - "language", "full_title"] %} - json.field {{name}}, @{{name.id}} - {% end %} - json.field "time", @time.to_unix.to_s - json.field "manga_title", @manga.title - json.field "manga_id", @manga.id - json.field "groups" do - json.object do - @groups.each do |gid, gname| - json.field gname, gid - end - end - end - end - end - end - - def parse_json(obj, lang) - parse_strings_from_json ["lang_code", "title", "volume", - "chapter"] - language = lang[@lang_code]? 
- @language = language if language - @time = Time.unix obj["timestamp"].as_i - suffixes = ["", "_2", "_3"] - suffixes.each do |s| - gid = obj["group_id#{s}"].as_i - next if gid == 0 - gname = obj["group_name#{s}"].as_s - @groups << {gid, gname} - end - - rename_rule = Rename::Rule.new \ - Config.current.mangadex["chapter_rename_rule"].to_s - @full_title = rename rename_rule - rescue e - raise "failed to parse json: #{e}" - end - - def rename(rule : Rename::Rule) - hash = properties_to_hash ["id", "title", "volume", "chapter", - "lang_code", "language", "pages"] - hash["groups"] = @groups.map { |g| g[1] }.join "," - rule.render hash - end - end - - class Manga - string_properties ["cover_url", "description", "title", "author", "artist"] - property chapters = [] of Chapter - property id : String - - def initialize(@id, json_obj : JSON::Any) - self.parse_json json_obj - end - - def to_info_json(with_chapters = true) - JSON.build do |json| - json.object do - {% for name in ["id", "title", "description", "author", "artist", - "cover_url"] %} - json.field {{name}}, @{{name.id}} - {% end %} - if with_chapters - json.field "chapters" do - json.array do - @chapters.each do |c| - json.raw c.to_info_json - end - end - end - end - end - end - end - - def parse_json(obj) - parse_strings_from_json ["cover_url", "description", "title", "author", - "artist"] - rescue e - raise "failed to parse json: #{e}" - end - - def rename(rule : Rename::Rule) - rule.render properties_to_hash ["id", "title", "author", "artist"] - end - end - - class API - use_default - - def initialize - @base_url = Config.current.mangadex["api_url"].to_s || - "https://mangadex.org/api/" - @lang = {} of String => String - CSV.each_row {{read_file "src/assets/lang_codes.csv"}} do |row| - @lang[row[1]] = row[0] - end - end - - def get(url) - headers = HTTP::Headers{ - "User-agent" => "Mangadex.cr", - } - res = HTTP::Client.get url, headers - raise "Failed to get #{url}. [#{res.status_code}] " \ - "#{res.status_message}" if !res.success? - JSON.parse res.body - end - - def get_manga(id) - obj = self.get File.join @base_url, "manga/#{id}" - if obj["status"]? != "OK" - raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`" - end - begin - manga = Manga.new id, obj["manga"] - obj["chapter"].as_h.map do |k, v| - chapter = Chapter.new k, v, manga, @lang - manga.chapters << chapter - end - manga - rescue - raise "Failed to parse JSON" - end - end - - def get_chapter(chapter : Chapter) - obj = self.get File.join @base_url, "chapter/#{chapter.id}" - if obj["status"]? == "external" - raise "This chapter is hosted on an external site " \ - "#{obj["external"]?}, and Mango does not support " \ - "external chapters." - end - if obj["status"]? != "OK" - raise "Expecting `OK` in the `status` field. Got `#{obj["status"]?}`" - end - begin - server = obj["server"].as_s - hash = obj["hash"].as_s - chapter.pages = obj["page_array"].as_a.map do |fn| - { - fn.as_s, - "#{server}#{hash}/#{fn.as_s}", - } - end - rescue - raise "Failed to parse JSON" - end - end - - def get_chapter(id : String) - obj = self.get File.join @base_url, "chapter/#{id}" - if obj["status"]? == "external" - raise "This chapter is hosted on an external site " \ - "#{obj["external"]?}, and Mango does not support " \ - "external chapters." - end - if obj["status"]? != "OK" - raise "Expecting `OK` in the `status` field. 
Got `#{obj["status"]?}`" - end - manga_id = "" - begin - manga_id = obj["manga_id"].as_i.to_s - rescue - raise "Failed to parse JSON" - end - manga = self.get_manga manga_id - chapter = manga.chapters.find { |c| c.id == id }.not_nil! - self.get_chapter chapter - chapter - end - end -end diff --git a/src/mangadex/downloader.cr b/src/mangadex/downloader.cr index e2babb69..c0b50c72 100644 --- a/src/mangadex/downloader.cr +++ b/src/mangadex/downloader.cr @@ -1,5 +1,7 @@ -require "./api" +require "mangadex" require "compress/zip" +require "../rename" +require "./ext" module MangaDex class PageJob @@ -21,7 +23,7 @@ module MangaDex use_default def initialize - @api = API.default + @client = Client.from_config super end @@ -46,7 +48,7 @@ module MangaDex @downloading = true @queue.set_status Queue::JobStatus::Downloading, job begin - chapter = @api.get_chapter(job.id) + chapter = @client.chapter job.id rescue e Logger.error e @queue.set_status Queue::JobStatus::Error, job @@ -73,8 +75,8 @@ module MangaDex # Create a buffered channel. It works as an FIFO queue channel = Channel(PageJob).new chapter.pages.size spawn do - chapter.pages.each_with_index do |tuple, i| - fn, url = tuple + chapter.pages.each_with_index do |url, i| + fn = Path.new(URI.parse(url).path).basename ext = File.extname fn fn = "#{i.to_s.rjust len, '0'}#{ext}" page_job = PageJob.new url, fn, writer, @retries diff --git a/src/mangadex/ext.cr b/src/mangadex/ext.cr new file mode 100644 index 00000000..dfb302c5 --- /dev/null +++ b/src/mangadex/ext.cr @@ -0,0 +1,60 @@ +private macro properties_to_hash(names) + { + {% for name in names %} + "{{name.id}}" => {{name.id}}.to_s, + {% end %} + } +end + +# Monkey-patch the structures in the `mangadex` shard to suit our needs +module MangaDex + struct Client + @@group_cache = {} of String => Group + + def self.from_config : Client + self.new base_url: Config.current.mangadex["base_url"].to_s, + api_url: Config.current.mangadex["api_url"].to_s + end + end + + struct Manga + def rename(rule : Rename::Rule) + rule.render properties_to_hash %w(id title author artist) + end + + def to_info_json + hash = JSON.parse(to_json).as_h + _chapters = chapters.map do |c| + JSON.parse c.to_info_json + end + hash["chapters"] = JSON::Any.new _chapters + hash.to_json + end + end + + struct Chapter + def rename(rule : Rename::Rule) + hash = properties_to_hash %w(id title volume chapter lang_code language) + hash["groups"] = groups.map(&.name).join "," + rule.render hash + end + + def full_title + rule = Rename::Rule.new \ + Config.current.mangadex["chapter_rename_rule"].to_s + rename rule + end + + def to_info_json + hash = JSON.parse(to_json).as_h + hash["language"] = JSON::Any.new language + _groups = {} of String => JSON::Any + groups.each do |g| + _groups[g.name] = JSON::Any.new g.id + end + hash["groups"] = JSON::Any.new _groups + hash["full_title"] = JSON::Any.new full_title + hash.to_json + end + end +end diff --git a/src/routes/api.cr b/src/routes/api.cr index 7fc8b46a..ff6a087f 100644 --- a/src/routes/api.cr +++ b/src/routes/api.cr @@ -414,8 +414,7 @@ struct APIRouter get "/api/admin/mangadex/manga/:id" do |env| begin id = env.params.url["id"] - api = MangaDex::API.default - manga = api.get_manga id + manga = MangaDex::Client.from_config.manga id send_json env, manga.to_info_json rescue e Logger.error e @@ -434,12 +433,12 @@ struct APIRouter chapters = env.params.json["chapters"].as(Array).map { |c| c.as_h } jobs = chapters.map { |chapter| Queue::Job.new( - chapter["id"].as_s, - chapter["manga_id"].as_s, 
+ chapter["id"].as_i64.to_s, + chapter["mangaId"].as_i64.to_s, chapter["full_title"].as_s, - chapter["manga_title"].as_s, + chapter["mangaTitle"].as_s, Queue::JobStatus::Pending, - Time.unix chapter["time"].as_s.to_i + Time.unix chapter["timestamp"].as_i64 ) } inserted_count = Queue.default.push jobs From 1f50785e8f77552712128fee654de3d05e663056 Mon Sep 17 00:00:00 2001 From: Alex Ling Date: Sun, 31 Jan 2021 12:21:22 +0000 Subject: [PATCH 04/15] Rewrite MangaDex download page with Alpine --- public/js/download.js | 503 ++++++++++++++++-------------------- src/config.cr | 2 + src/views/download.html.ecr | 164 +++++++----- 3 files changed, 317 insertions(+), 352 deletions(-) diff --git a/public/js/download.js b/public/js/download.js index 60aac641..4041d6ad 100644 --- a/public/js/download.js +++ b/public/js/download.js @@ -1,304 +1,233 @@ -$(() => { - $('#search-input').keypress(event => { - if (event.which === 13) { - search(); - } - }); - $('.filter-field').each((i, ele) => { - $(ele).change(() => { - buildTable(); - }); - }); -}); -const selectAll = () => { - $('tbody > tr').each((i, e) => { - $(e).addClass('ui-selected'); - }); -}; -const unselect = () => { - $('tbody > tr').each((i, e) => { - $(e).removeClass('ui-selected'); - }); -}; -const download = () => { - const selected = $('tbody > tr.ui-selected'); - if (selected.length === 0) return; - UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => { - $('#download-btn').attr('hidden', ''); - $('#download-spinner').removeAttr('hidden'); - const ids = selected.map((i, e) => { - return parseInt($(e).find('td').first().text()); - }).get(); - const chapters = globalChapters.filter(c => ids.indexOf(c.id) >= 0); - console.log(ids); - $.ajax({ - type: 'POST', - url: base_url + 'api/admin/mangadex/download', - data: JSON.stringify({ - chapters: chapters - }), - contentType: "application/json", - dataType: 'json' - }) - .done(data => { - console.log(data); - if (data.error) { - alert('danger', `Failed to add chapters to the download queue. 
Error: ${data.error}`); +const downloadComponent = () => { + return { + chaptersLimit: 1000, + loading: false, + addingToDownload: false, + searchInput: '', + data: {}, + chapters: [], + langChoice: 'All', + groupChoice: 'All', + chapterRange: '', + volumeRange: '', + + get languages() { + const set = new Set(); + if (this.data.chapters) { + this.data.chapters.forEach(chp => { + set.add(chp.language); + }); + } + const ary = [...set].sort(); + ary.unshift('All'); + return ary; + }, + + get groups() { + const set = new Set(); + if (this.data.chapters) { + this.data.chapters.forEach(chp => { + Object.keys(chp.groups).forEach(g => { + set.add(g); + }); + }); + } + const ary = [...set].sort(); + ary.unshift('All'); + return ary; + }, + + init() { + const tableObserver = new MutationObserver(() => { + console.log('table mutated'); + $("#selectable").selectable({ + filter: 'tr' + }); + }); + tableObserver.observe($('table').get(0), { + childList: true, + subtree: true + }); + }, + filtersUpdated() { + if (!this.data.chapters) + this.chapters = []; + const filters = { + chapter: this.parseRange(this.chapterRange), + volume: this.parseRange(this.volumeRange), + lang: this.langChoice, + group: this.groupChoice + }; + console.log('filters:', filters); + let _chapters = this.data.chapters.slice(); + Object.entries(filters).forEach(([k, v]) => { + if (v === 'All') return; + if (k === 'group') { + _chapters = _chapters.filter(c => { + const unescaped_groups = Object.entries(c.groups).map(([g, id]) => this.unescapeHTML(g)); + return unescaped_groups.indexOf(v) >= 0; + }); return; } - const successCount = parseInt(data.success); - const failCount = parseInt(data.fail); - UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. Proceed to the download manager?`).then(() => { - window.location.href = base_url + 'admin/downloads'; + if (k === 'lang') { + _chapters = _chapters.filter(c => c.language === v); + return; + } + const lb = parseFloat(v[0]); + const ub = parseFloat(v[1]); + if (isNaN(lb) && isNaN(ub)) return; + _chapters = _chapters.filter(c => { + const val = parseFloat(c[k]); + if (isNaN(val)) return false; + if (isNaN(lb)) + return val <= ub; + else if (isNaN(ub)) + return val >= lb; + else + return val >= lb && val <= ub; }); - }) - .fail((jqXHR, status) => { - alert('danger', `Failed to add chapters to the download queue. 
Error: [${jqXHR.status}] ${jqXHR.statusText}`); - }) - .always(() => { - $('#download-spinner').attr('hidden', ''); - $('#download-btn').removeAttr('hidden'); }); - }); -}; -const toggleSpinner = () => { - var attr = $('#spinner').attr('hidden'); - if (attr) { - $('#spinner').removeAttr('hidden'); - $('#search-btn').attr('hidden', ''); - } else { - $('#search-btn').removeAttr('hidden'); - $('#spinner').attr('hidden', ''); - } - searching = !searching; -}; -var searching = false; -var globalChapters; -const search = () => { - if (searching) { - return; - } - $('#manga-details').attr('hidden', ''); - $('#filter-form').attr('hidden', ''); - $('table').attr('hidden', ''); - $('#selection-controls').attr('hidden', ''); - $('#filter-notification').attr('hidden', ''); - toggleSpinner(); - const input = $('input').val(); - - if (input === "") { - toggleSpinner(); - return; - } - - var int_id = -1; - - try { - const path = new URL(input).pathname; - const match = /\/(?:title|manga)\/([0-9]+)/.exec(path); - int_id = parseInt(match[1]); - } catch (e) { - int_id = parseInt(input); - } - - if (int_id <= 0 || isNaN(int_id)) { - alert('danger', 'Please make sure you are using a valid manga ID or manga URL from Mangadex.'); - toggleSpinner(); - return; - } - - $.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`) - .done((data) => { - if (data.error) { - alert('danger', 'Failed to get manga info. Error: ' + data.error); + console.log('filtered chapters:', _chapters); + this.chapters = _chapters; + }, + search() { + if (this.loading || this.searchInput === '') return; + this.loading = true; + this.data = {}; + + var int_id = -1; + try { + const path = new URL(this.searchInput).pathname; + const match = /\/(?:title|manga)\/([0-9]+)/.exec(path); + int_id = parseInt(match[1]); + } catch (e) { + int_id = parseInt(this.searchInput); + } + if (int_id <= 0 || isNaN(int_id)) { + alert('danger', 'Please make sure you are using a valid manga ID or manga URL from Mangadex.'); + this.loading = false; return; } - $('#cover').attr("src", data.mainCover); - $('#title').text("Title: " + data.title); - $('#artist').text("Artist: " + data.artist); - $('#author').text("Author: " + data.author); - - $('#manga-details').removeAttr('hidden'); - - console.log(data.chapters); - globalChapters = data.chapters; - - let langs = new Set(); - let group_names = new Set(); - data.chapters.forEach(chp => { - Object.entries(chp.groups).forEach(([k, v]) => { - group_names.add(k); + $.getJSON(`${base_url}api/admin/mangadex/manga/${int_id}`) + .done((data) => { + if (data.error) { + alert('danger', 'Failed to get manga info. Error: ' + data.error); + return; + } + + this.data = data; + this.chapters = data.chapters; + }) + .fail((jqXHR, status) => { + alert('danger', `Failed to get manga info. 
Error: [${jqXHR.status}] ${jqXHR.statusText}`); + }) + .always(() => { + this.loading = false; }); - langs.add(chp.language); - }); - - const comp = (a, b) => { - var ai; - var bi; - try { - ai = parseFloat(a); - } catch (e) {} - try { - bi = parseFloat(b); - } catch (e) {} - if (typeof ai === 'undefined') return -1; - if (typeof bi === 'undefined') return 1; - if (ai < bi) return 1; - if (ai > bi) return -1; - return 0; - }; - langs = [...langs].sort(); - group_names = [...group_names].sort(); + }, - langs.unshift('All'); - group_names.unshift('All'); + parseRange(str) { + const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m; + const matches = str.match(regex); + var num; - $('select#lang-select').html(langs.map(e => ``).join('')); - $('select#group-select').html(group_names.map(e => ``).join('')); + if (!matches) { + return [null, null]; + } else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') { + // e.g., <= 30 + num = parseInt(matches[2]); + if (isNaN(num)) { + return [null, null]; + } + switch (matches[1]) { + case '<': + return [null, num - 1]; + case '<=': + return [null, num]; + case '>': + return [num + 1, null]; + case '>=': + return [num, null]; + } + } else if (typeof matches[3] !== 'undefined') { + // a single number + num = parseInt(matches[3]); + if (isNaN(num)) { + return [null, null]; + } + return [num, num]; + } else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') { + // e.g., 10 - 23 + num = parseInt(matches[4]); + const n2 = parseInt(matches[5]); + if (isNaN(num) || isNaN(n2) || num > n2) { + return [null, null]; + } + return [num, n2]; + } else { + // empty or space only + return [null, null]; + } + }, - $('#filter-form').removeAttr('hidden'); + unescapeHTML(str) { + var elt = document.createElement("span"); + elt.innerHTML = str; + return elt.innerText; + }, - buildTable(); - }) - .fail((jqXHR, status) => { - alert('danger', `Failed to get manga info. 
Error: [${jqXHR.status}] ${jqXHR.statusText}`); - }) - .always(() => { - toggleSpinner(); - }); -}; -const parseRange = str => { - const regex = /^[\t ]*(?:(?:(<|<=|>|>=)[\t ]*([0-9]+))|(?:([0-9]+))|(?:([0-9]+)[\t ]*-[\t ]*([0-9]+))|(?:[\t ]*))[\t ]*$/m; - const matches = str.match(regex); - var num; + selectAll() { + $('tbody > tr').each((i, e) => { + $(e).addClass('ui-selected'); + }); + }, - if (!matches) { - alert('danger', `Failed to parse filter input ${str}`); - return [null, null]; - } else if (typeof matches[1] !== 'undefined' && typeof matches[2] !== 'undefined') { - // e.g., <= 30 - num = parseInt(matches[2]); - if (isNaN(num)) { - alert('danger', `Failed to parse filter input ${str}`); - return [null, null]; - } - switch (matches[1]) { - case '<': - return [null, num - 1]; - case '<=': - return [null, num]; - case '>': - return [num + 1, null]; - case '>=': - return [num, null]; - } - } else if (typeof matches[3] !== 'undefined') { - // a single number - num = parseInt(matches[3]); - if (isNaN(num)) { - alert('danger', `Failed to parse filter input ${str}`); - return [null, null]; - } - return [num, num]; - } else if (typeof matches[4] !== 'undefined' && typeof matches[5] !== 'undefined') { - // e.g., 10 - 23 - num = parseInt(matches[4]); - const n2 = parseInt(matches[5]); - if (isNaN(num) || isNaN(n2) || num > n2) { - alert('danger', `Failed to parse filter input ${str}`); - return [null, null]; - } - return [num, n2]; - } else { - // empty or space only - return [null, null]; - } -}; -const getFilters = () => { - const filters = {}; - $('.uk-select').each((i, ele) => { - const id = $(ele).attr('id'); - const by = id.split('-')[0]; - const choice = $(ele).val(); - filters[by] = choice; - }); - filters.volume = parseRange($('#volume-range').val()); - filters.chapter = parseRange($('#chapter-range').val()); - return filters; -}; -const buildTable = () => { - $('table').attr('hidden', ''); - $('#selection-controls').attr('hidden', ''); - $('#filter-notification').attr('hidden', ''); - console.log('rebuilding table'); - const filters = getFilters(); - console.log('filters:', filters); - var chapters = globalChapters.slice(); - Object.entries(filters).forEach(([k, v]) => { - if (v === 'All') return; - if (k === 'group') { - chapters = chapters.filter(c => { - unescaped_groups = Object.entries(c.groups).map(([g, id]) => unescapeHTML(g)); - return unescaped_groups.indexOf(v) >= 0; + clearSelection() { + $('tbody > tr').each((i, e) => { + $(e).removeClass('ui-selected'); + }); + }, + + download() { + const selected = $('tbody > tr.ui-selected'); + if (selected.length === 0) return; + UIkit.modal.confirm(`Download ${selected.length} selected chapters?`).then(() => { + const ids = selected.map((i, e) => { + return parseInt($(e).find('td').first().text()); + }).get(); + const chapters = this.chapters.filter(c => ids.indexOf(c.id) >= 0); + console.log(ids); + this.addingToDownload = true; + $.ajax({ + type: 'POST', + url: `${base_url}api/admin/mangadex/download`, + data: JSON.stringify({ + chapters: chapters + }), + contentType: "application/json", + dataType: 'json' + }) + .done(data => { + console.log(data); + if (data.error) { + alert('danger', `Failed to add chapters to the download queue. Error: ${data.error}`); + return; + } + const successCount = parseInt(data.success); + const failCount = parseInt(data.fail); + UIkit.modal.confirm(`${successCount} of ${successCount + failCount} chapters added to the download queue. 
Proceed to the download manager?`).then(() => { + window.location.href = base_url + 'admin/downloads'; + }); + }) + .fail((jqXHR, status) => { + alert('danger', `Failed to add chapters to the download queue. Error: [${jqXHR.status}] ${jqXHR.statusText}`); + }) + .always(() => { + this.addingToDownload = false; + }); }); - return; - } - if (k === 'lang') { - chapters = chapters.filter(c => c.language === v); - return; } - const lb = parseFloat(v[0]); - const ub = parseFloat(v[1]); - if (isNaN(lb) && isNaN(ub)) return; - chapters = chapters.filter(c => { - const val = parseFloat(c[k]); - if (isNaN(val)) return false; - if (isNaN(lb)) - return val <= ub; - else if (isNaN(ub)) - return val >= lb; - else - return val >= lb && val <= ub; - }); - }); - console.log('filtered chapters:', chapters); - $('#count-text').text(`${chapters.length} chapters found`); - - const chaptersLimit = 1000; - if (chapters.length > chaptersLimit) { - $('#filter-notification').text(`Mango can only list ${chaptersLimit} chapters, but we found ${chapters.length} chapters in this manga. Please use the filter options above to narrow down your search.`); - $('#filter-notification').removeAttr('hidden'); - return; - } - - const inner = chapters.map(chp => { - const group_str = Object.entries(chp.groups).map(([k, v]) => { - return `${k}`; - }).join(' | '); - return ` - ${chp.id} - ${chp.title} - ${chp.language} - ${group_str} - ${chp.volume} - ${chp.chapter} - ${moment.unix(chp.timestamp).fromNow()} - `; - }).join(''); - const tbody = `${inner}`; - $('tbody').remove(); - $('table').append(tbody); - $('table').removeAttr('hidden'); - $("#selectable").selectable({ - filter: 'tr' - }); - $('#selection-controls').removeAttr('hidden'); -}; - -const unescapeHTML = (str) => { - var elt = document.createElement("span"); - elt.innerHTML = str; - return elt.innerText; + }; }; diff --git a/src/config.cr b/src/config.cr index f8746fbd..6c31c5bc 100644 --- a/src/config.cr +++ b/src/config.cr @@ -100,5 +100,7 @@ class Config "https://api.mangadex.org/v2 to suppress this warning." } mangadex["api_url"] = "https://mangadex.org/api/v2" end + mangadex["api_url"] = mangadex["api_url"].to_s.rstrip "/" + mangadex["base_url"] = mangadex["base_url"].to_s.rstrip "/" end end diff --git a/src/views/download.html.ecr b/src/views/download.html.ecr index 402c137b..7a968587 100644 --- a/src/views/download.html.ecr +++ b/src/views/download.html.ecr @@ -1,81 +1,115 @@

Download from MangaDex

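
A note on the proxy-auth flow added in PATCH 01/15 and reworked in PATCH 02/15: when `auth_proxy_header_name` is set in the config, `AuthHandler#call` trusts the username carried in that request header and only verifies that it exists in storage; `is_admin?` in src/util/web.cr then resolves the same header through `get_username`, so admin routes still require that user to be flagged as an admin in the database. The sketch below restates that check as a standalone helper, assuming the `Config`, `Storage`, and Kemal `env` interfaces shown in the diffs above; the helper name `proxy_authenticated?` is hypothetical and not part of the patch.

    # Sketch only: mirrors the header check in AuthHandler#call after PATCH 02/15.
    # Assumes Storage#username_exists returns a Bool, as its usage in the diff suggests.
    def proxy_authenticated?(env) : Bool
      header = Config.current.auth_proxy_header_name
      # Proxy auth is disabled unless a header name is configured.
      return false if header.empty?
      # The reverse proxy is expected to set this header to the authenticated username.
      username = env.request.headers[header]?
      return false unless username
      Storage.default.username_exists username
    end

The handler performs no further verification of the header value, so the design assumes the header is set by a trusted reverse proxy sitting in front of Mango.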