// Frontend for the Hackage "new browse" page.
//
// Keeps a single mutable Model of the UI state (page, sort, search query),
// POSTs it as a BrowseOptions JSON payload to /newpkglist, and renders the
// returned package rows, paginator and index indicator. State is mirrored
// into the history stack so back/forward restore the listing.
const d = document;

const initialParams = new URL(d.location).searchParams;
// This parameter is named 'terms' because it is from before filters were
// introduced. But we will parse it as a normal search string (including filters).
const initialSearchQuery = initialParams.has('terms') ? initialParams.get('terms') : '';
d.querySelector("#searchQuery").value = initialSearchQuery;

// UI state. Instances are also stored in history entries via pushState so
// the popstate handler can restore them verbatim.
class Model {
  page = 0
  numberOfResults = 0
  column = 'default'
  direction = 'ascending'
  searchQuery = initialSearchQuery
  // Toggles the sort direction in place and returns
  // [previousDirection, newDirection] for updating the CSS classes.
  flipDirection() {
    if (this.direction === 'ascending') {
      return ['ascending', this.direction = 'descending'];
    } else {
      return ['descending', this.direction = 'ascending'];
    }
  }
}

const state = new Model();

// Restore the state saved by changePage/submitSearch when the user
// navigates with the browser's back/forward buttons.
addEventListener('popstate', async (evt) => {
  if (evt.state === null) {
    return;
  }
  state.page = evt.state.page;
  state.column = evt.state.column;
  state.direction = evt.state.direction;
  state.searchQuery = evt.state.searchQuery;
  d.querySelector("#searchQuery").value = evt.state.searchQuery;
  await refresh();
});

// POST the current browse options and return the decoded JSON response.
// On a non-OK response the #fatalError banner is shown and an Error is
// thrown so callers do not continue rendering.
const get = async () => {
  const browseOptions = {
    page: state.page,
    sort: { column: state.column, direction: state.direction },
    searchQuery: state.searchQuery,
  };
  const formData = new FormData();
  formData.append('browseOptions', JSON.stringify(browseOptions));
  const response = await fetch('/newpkglist', { method: 'POST', body: formData });
  if (!response.ok) {
    const el = d.querySelector("#fatalError");
    el.style.display = "block";
    const err = await response.text();
    el.textContent = "Error with Hackage server: " + err;
    console.log(browseOptions);
    throw new Error("fetch failed: " + err);
  }
  return response.json();
};

// Build the package-name cell: a <td> wrapping a link to the package page.
const createName = (nameDict) => {
  const name = d.createElement("td");
  const nameLink = d.createElement("a");
  nameLink.setAttribute("href", nameDict.uri);
  nameLink.appendChild(d.createTextNode(nameDict.display));
  name.appendChild(nameLink);
  return name;
};

// Build a plain-text <td>.
const createSimpleText = (text) => {
  const el = d.createElement("td");
  el.appendChild(d.createTextNode(text));
  return el;
};

// Build a comma-separated (via CSS) list of links.
// Used with renderUser and renderTag results from backend.
const createCommaList = (arr) => {
  const ul = d.createElement("ul");
  ul.classList.add("commaList");
  for (const dict of arr) {
    const li = d.createElement("li");
    const a = d.createElement("a");
    a.setAttribute("href", dict.uri);
    a.appendChild(d.createTextNode(dict.display));
    li.appendChild(a);
    ul.appendChild(li);
  }
  return ul;
};

// Build the tags cell. An empty tag list renders an empty cell.
// NOTE: the original compared `tagsArr === []`, which is always false
// (arrays compare by reference), so the "(" ")" wrapper was emitted even
// for packages without tags; fixed to a length check.
const createTags = (tagsArr) => {
  const el = d.createElement("td");
  if (tagsArr.length === 0) {
    return el;
  }
  el.appendChild(d.createTextNode("("));
  el.appendChild(createCommaList(tagsArr));
  el.appendChild(d.createTextNode(")"));
  return el;
};

// Build the last-upload cell: shows the date part, with the full localized
// timestamp in the tooltip.
const createLastUpload = (lastUploadISO8601) => {
  const el = d.createElement("td");
  const date = lastUploadISO8601.substr(0, "0000-00-00".length);
  el.setAttribute("title", new Date(lastUploadISO8601).toLocaleString());
  el.classList.add("lastUpload");
  el.appendChild(d.createTextNode(date));
  return el;
};

// Build the maintainers cell. An empty maintainer list renders an empty
// cell (same reference-equality fix as createTags).
const createMaintainers = (maintainersArr) => {
  const el = d.createElement("td");
  if (maintainersArr.length === 0) {
    return el;
  }
  el.appendChild(createCommaList(maintainersArr));
  return el;
};

// Replace the table body with one row per package in the response page.
const replaceRows = (response) => {
  const listing = d.querySelector("#listing");
  listing.replaceChildren();
  for (const row of response.pageContents) {
    const tr = d.createElement("tr");
    tr.appendChild(createName(row.name));
    tr.appendChild(createSimpleText(row.downloads));
    tr.appendChild(createSimpleText(row.votes));
    tr.appendChild(createSimpleText(row.description));
    tr.appendChild(createTags(row.tags));
    tr.appendChild(createLastUpload(row.lastUpload));
    tr.appendChild(createMaintainers(row.maintainers));
    listing.appendChild(tr);
  }
};

// Clear the sort arrow/ARIA attribute from the currently sorted column.
const removeSortIndicator = () => {
  // No column is actually visible for the default sort mode,
  // so there is nothing to do in that case.
  if (state.column !== 'default') {
    const columnHeader = d.querySelector("#arrow-" + state.column);
    columnHeader.removeAttribute("aria-sort");
    columnHeader.classList.remove('ascending');
    columnHeader.classList.remove('descending');
  }
};

// Column-header click handler: clicking the active column flips the
// direction; clicking another column sorts ascending by it. Always resets
// to page 0.
export const sort = async (column) => {
  if (state.column === column) {
    const [oldCls, newCls] = state.flipDirection();
    const columnHeader = d.querySelector("#arrow-" + column);
    columnHeader.classList.toggle(oldCls);
    columnHeader.classList.toggle(newCls);
    columnHeader.setAttribute("aria-sort", newCls);
  } else {
    removeSortIndicator();

    state.direction = 'ascending';
    state.column = column;

    // Add sort indicator on new column
    const columnHeader = d.querySelector("#arrow-" + column);
    columnHeader.classList.add("ascending");
    columnHeader.setAttribute("aria-sort", "ascending");
  }
  state.page = 0;
  await refresh();
};

const pageSize = 50; // make sure it is kept in sync with backend

// Whether a zero-indexed page number is within the result set. Page 0 is
// always available so an empty result set still has a valid page.
const pageAvailable = (page) => {
  if (page < 0) return false;
  if (page === 0) return true;
  return page * pageSize < state.numberOfResults;
};

// Navigate to the given page, recording the state in history.
const changePage = async (candidate) => {
  if (!pageAvailable(candidate)) {
    return;
  }
  state.page = candidate;
  history.pushState(state, d.title);
  await refresh();
  scrollTo(0, d.body.scrollHeight);
};

// "Showing X to Y of Z entries" indicator for the current page.
const createIndexIndicator = () => {
  const el = d.createElement("div");
  const minIdx = state.page * pageSize + 1;
  const maxIdx = Math.min((state.page + 1) * pageSize, state.numberOfResults);
  let fullMsg;
  if (state.numberOfResults === 0) {
    fullMsg = "No results found.";
  } else {
    const entriesText = state.numberOfResults === 1 ? "entry" : "entries";
    fullMsg = `Showing ${minIdx} to ${maxIdx} of ${state.numberOfResults} ${entriesText}`;
  }
  el.appendChild(d.createTextNode(fullMsg));
  return el;
};

// Fetch the current page from the server and re-render the table,
// paginator and Hoogle footer.
const refresh = async () => {
  const res = await get();
  state.numberOfResults = res.numberOfResults;
  replaceRows(res);
  const container = d.querySelector("#paginatorContainer");
  container.replaceChildren();
  container.appendChild(createIndexIndicator());
  container.appendChild(createPaginator());
  if (state.searchQuery.trim() === "") {
    d.querySelector("#newBrowseFooter").style.display = "none";
  } else {
    d.querySelector("#newBrowseFooter").style.display = "block";
    const url = new URL(hoogleNoParam);
    url.searchParams.set("hoogle", state.searchQuery);
    d.querySelector("#hoogleLink").setAttribute("href", url);
  }
};

// Form submit handler: reads the query box, resets sort/page, records the
// query in the URL ('terms' parameter) and refreshes.
export const submitSearch = async (evt) => {
  if (evt) evt.preventDefault();
  state.searchQuery = d.querySelector("#searchQuery").value;
  removeSortIndicator();
  state.column = 'default';
  state.direction = 'ascending';
  state.page = 0;

  const url = new URL(d.location);
  url.searchParams.set('terms', state.searchQuery);
  history.pushState(state, d.title, url);

  await refresh();
};

// A numbered page link (display is one-based, state.page is zero-based).
const createPageLink = (num) => {
  const a = d.createElement("a");
  if (state.page === num) a.classList.add("current");
  a.setAttribute("href", "#");
  a.addEventListener('click', (evt) => {
    evt.preventDefault();
    changePage(num);
  });
  a.appendChild(d.createTextNode(num + 1));
  return a;
};

// A Previous/Next control. When disabled it is an inert <span>; the
// original also attached an href and click handler to the span, which is
// invalid markup and a dead click target.
const createPrevNext = (prevNextNum, disabled, txt) => {
  const el = d.createElement(disabled ? "span" : "a");
  if (disabled) {
    el.classList.add("disabled");
  } else {
    el.setAttribute("href", "#");
    el.addEventListener('click', (evt) => {
      evt.preventDefault();
      changePage(prevNextNum);
    });
  }
  el.appendChild(d.createTextNode(txt));
  return el;
};

const createEllipsis = () => {
  const el = d.createElement("span");
  el.textContent = "…";
  return el;
};

// Build the paginator: Previous, up to five page links with ellipses
// collapsing the ends, Next.
const createPaginator = () => {
  const maxPage = maxAvailablePage(state.numberOfResults);

  const pag = d.createElement("div");
  pag.classList.add("paginator");
  pag.appendChild(createPrevNext(state.page - 1, state.page === 0, "Previous"));
  // note that page is zero-indexed
  if (maxPage <= 4) {
    // No ellipsis
    for (let i = 0; i <= maxPage; i++) {
      pag.appendChild(createPageLink(i));
    }
  } else if (state.page <= 3) {
    // One ellipsis, at the end
    for (let i = 0; i <= 4; i++) {
      pag.appendChild(createPageLink(i));
    }
    pag.appendChild(createEllipsis());
    pag.appendChild(createPageLink(maxPage));
  } else if (state.page + 3 >= maxPage) {
    // One ellipsis, at the start
    pag.appendChild(createPageLink(0));
    pag.appendChild(createEllipsis());
    for (let i = maxPage - 4; i <= maxPage; i++) {
      pag.appendChild(createPageLink(i));
    }
  } else {
    // Two ellipses, at both ends
    pag.appendChild(createPageLink(0));
    pag.appendChild(createEllipsis());
    for (let i = state.page - 1; i <= state.page + 1; i++) {
      pag.appendChild(createPageLink(i));
    }
    pag.appendChild(createEllipsis());
    pag.appendChild(createPageLink(maxPage));
  }
  const isNowOnLastPage = state.page === maxPage;
  pag.appendChild(createPrevNext(state.page + 1, isNowOnLastPage, "Next"));

  return pag;
};

// Highest zero-indexed page for the given result count (0 when empty).
const maxAvailablePage = (numberOfResults) => {
  if (numberOfResults === 0) numberOfResults++;
  return Math.floor((numberOfResults - 1) / pageSize);
};

const hoogleNoParam = "https://hoogle.haskell.org";

let expanded = false;

// Show/hide the advanced-search form, keeping the chevron and
// aria-expanded attribute in sync.
export const toggleAdvanced = () => {
  if (expanded) {
    d.querySelector("#toggleAdvanced").setAttribute("aria-expanded", "false");
    d.querySelector("#chevron").textContent = "▸";
    d.querySelector("#advancedForm").style.display = "none";
  } else {
    d.querySelector("#toggleAdvanced").setAttribute("aria-expanded", "true");
    d.querySelector("#chevron").textContent = "▾";
    d.querySelector("#advancedForm").style.display = "block";
  }
  expanded = !expanded;
};

export const appendDeprecated = async (evt) => {
  if (evt) evt.preventDefault();
  d.querySelector("#searchQuery").value += " (deprecated:any)";
  await submitSearch();
};

// True iff the string is a non-negative decimal number. If there is a
// decimal separator, digits before it are required — the regex alone
// decides this; the original additionally round-tripped the match result
// through parseFloat, which only worked by accidental array coercion.
const isNonNegativeFloatString = (n) => /^\d+(\.\d+)?$/.test(n);

// Validate the age-of-last-upload field ("<number><d|w|m|y>"). Returns the
// duration string on success, false on failure (setting a validity message).
export const validateAgeOfLastUL = () => {
  const el = d.querySelector("#advAgeLastUL");
  const duration = el.value.trim();
  if (duration === ""
      || !(["d", "w", "m", "y"].includes(duration.substr(-1, 1)))
      || !isNonNegativeFloatString(duration.substr(0, duration.length - 1))) {
    el.setCustomValidity("Must be positive and end in d(ay), w(eek), m(onth) or y(ear)");
    return false;
  }
  el.setCustomValidity("");
  return duration;
};

export const appendAgeOfLastUL = async (evt) => {
  if (evt) evt.preventDefault();
  const maybeDuration = validateAgeOfLastUL();
  if (maybeDuration === false) {
    return;
  }
  d.querySelector("#searchQuery").value += ` (ageOfLastUpload < ${maybeDuration})`;
  await submitSearch();
};

// Validate the tag field (non-empty, alphanumeric ASCII). Returns the tag
// on success, false on failure.
export const validateTag = () => {
  const el = d.querySelector("#advTag");
  const tag = el.value.trim();
  if (tag === "" || !(/^[a-z0-9]+$/i.test(tag))) {
    el.setCustomValidity("Tag cannot be empty and must be alphanumeric and ASCII");
    return false;
  }
  el.setCustomValidity("");
  return tag;
};

export const appendTag = async (evt) => {
  if (evt) evt.preventDefault();
  const maybeTag = validateTag();
  if (maybeTag === false) {
    return;
  }
  d.querySelector("#searchQuery").value += ` (tag:${maybeTag})`;
  await submitSearch();
};

export const appendRating = async (evt) => {
  if (evt) evt.preventDefault();
  const rating = d.querySelector("#advRatingSlider").value;
  d.querySelector("#searchQuery").value += ` (rating >= ${rating})`;
  await submitSearch();
};

// Initial render on page load.
await refresh();
+

$heading$

+ $content$ +
+
+ + +
+

+
+
Also show deprecated packages
+
Last uploaded version younger than
+
Only show packages with tag
+
Rating greater than, or equal to 2
+
Usage
+

Apart from just writing words to search everywhere in package metadata, + you can also use filters in the search query string input field above. + Filters are surrounded by parentheses. + All filters have to pass for every package shown in the result, + that is, it is a + + logical conjunction. +

+
+
(downloads > 1000)
+
Only show packages with more than 1000 downloads within the last 30 days. The download count is inexact because Hackage uses a content delivery network.
+
(lastUpload < 2021-10-29)
+
Only show packages for which the last upload was before (i.e. excluding) the given UTC date in the 'complete date' format as specified using ISO 8601.
+
(lastUpload = 2021-10-29)
+
Only show packages for which the last upload occurred during the given UTC date, i.e. within that 24-hour period.
+
(maintainer:SimonMarlow)
+
Only show packages for which the maintainers list includes the user name SimonMarlow.
+
(tag:bsd3)
+
Only show packages with the bsd3 tag.
+
(not tag:network)
+
Do not show packages with the network tag. The not operator can also be used with other filters.
+
(ageOfLastUpload > 5d)
+
Only show packages uploaded more than five days ago.
+
(ageOfLastUpload > 4w)
+
Only show packages uploaded more than four weeks ago. A week has seven days.
+
(ageOfLastUpload < 1m)
+
Only show packages last uploaded less than one month ago. A month is considered to have 30.437 days.
+
(ageOfLastUpload < 2.5y)
+
Only show packages last uploaded less than 2.5 years ago. A year is considered to be 365.25 days.
+
(rating > 2.5)
+
Only show packages with a rating of more than 2.5. The dot is the only accepted decimal separator.
+
(rating /= 0)
+
Only show packages with a rating unequal to zero.
+
(deprecated:any)
+
Do not filter out deprecated packages. This must be explicitly added if desired.
+
(deprecated:true)
+
Only show deprecated packages.
+
(deprecated:false)
+
Only show packages that are not deprecated. If no other deprecation filter is given, this filter is automatically added.
+
+
+ + + + + + + + + + + + + +
NameDLsRatingDescriptionTagsLast U/LMaintainers
+ +
+
+
+ Alternatively, if you are looking for a particular function then try Hoogle. +
+
+ + diff --git a/hackage-server.cabal b/hackage-server.cabal index eaf343fb6..3ace74a33 100644 --- a/hackage-server.cabal +++ b/hackage-server.cabal @@ -253,6 +253,10 @@ library lib-server Distribution.Server.Util.Markdown Distribution.Server.Features + Distribution.Server.Features.Browse + Distribution.Server.Features.Browse.ApplyFilter + Distribution.Server.Features.Browse.Options + Distribution.Server.Features.Browse.Parsers Distribution.Server.Features.Core Distribution.Server.Features.Core.State Distribution.Server.Features.Core.Backup @@ -360,6 +364,7 @@ library lib-server , async ^>= 2.2.1 -- requires bumping http-io-streams , attoparsec ^>= 0.13 + , attoparsec-iso8601 ^>= 1.0 , base16-bytestring ^>= 1.0 -- requires bumping http-io-streams , base64-bytestring ^>= 1.1 @@ -511,6 +516,21 @@ test-suite HighLevelTest , io-streams ^>= 1.5.0.1 , http-io-streams ^>= 0.1.0.0 +test-suite PaginationTest + import: test-defaults + type: exitcode-stdio-1.0 + main-is: PaginationTest.hs + build-tool-depends: hackage-server:hackage-server + other-modules: Util + +test-suite BrowseQueryParserTest + import: test-defaults + type: exitcode-stdio-1.0 + main-is: BrowseQueryParserTest.hs + build-tool-depends: hackage-server:hackage-server + other-modules: Util + build-depends: attoparsec + test-suite CreateUserTest import: test-defaults diff --git a/src/Distribution/Server/Features.hs b/src/Distribution/Server/Features.hs index 669342b43..39daeb7c5 100644 --- a/src/Distribution/Server/Features.hs +++ b/src/Distribution/Server/Features.hs @@ -21,6 +21,7 @@ import Distribution.Server.Features.Upload (initUploadFeature) import Distribution.Server.Features.Mirror (initMirrorFeature) #ifndef MINIMAL +import Distribution.Server.Features.Browse (initNewBrowseFeature) import Distribution.Server.Features.TarIndexCache (initTarIndexCacheFeature) import Distribution.Server.Features.Html (initHtmlFeature) import Distribution.Server.Features.PackageCandidates (initPackageCandidatesFeature, 
candidatesCoreResource, queryGetCandidateIndex) @@ -151,6 +152,8 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do initSitemapFeature env mkPackageFeedFeature <- logStartup "package feed" $ initPackageFeedFeature env + mkNewBrowseFeature <- logStartup "new browse" $ + initNewBrowseFeature env #endif loginfo verbosity "Initialising features, part 2" @@ -324,6 +327,13 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do usersFeature tarIndexCacheFeature + browseFeature <- mkNewBrowseFeature + coreFeature + usersFeature + tagsFeature + listFeature + searchFeature + #endif -- The order of initialization above should be the same as @@ -370,6 +380,7 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do #ifdef DEBUG , serverCrashFeature #endif + , browseFeature ] -- Run all post init hooks, now that everyone's gotten a chance to register @@ -388,6 +399,7 @@ initHackageFeatures env@ServerEnv{serverVerbosity = verbosity} = do loginfo verbosity ("Initialising " ++ feature ++ " feature") logTiming verbosity ("Initialising " ++ feature ++ " feature done") action + -- | Checkpoint a feature's persistent state to disk. featureCheckpoint :: HackageFeature -> IO () featureCheckpoint = mapM_ abstractStateCheckpoint . 
featureState diff --git a/src/Distribution/Server/Features/Browse.hs b/src/Distribution/Server/Features/Browse.hs new file mode 100644 index 000000000..972342ddd --- /dev/null +++ b/src/Distribution/Server/Features/Browse.hs @@ -0,0 +1,127 @@ +{-# LANGUAGE BlockArguments, NamedFieldPuns, TupleSections #-} +module Distribution.Server.Features.Browse (initNewBrowseFeature, PaginationConfig(..), StartIndex(..), NumElems(..), paginate) where + +import Control.Arrow (left) +import Control.Monad.Except (ExceptT, liftIO, throwError) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except (except) +import Data.ByteString.Lazy (ByteString) +import Data.Time (getCurrentTime) + +import Data.Aeson (Value(Array), eitherDecode, object, toJSON, (.=)) +import qualified Data.Text as T +import qualified Data.Vector as V + +import Distribution.Server.Features.Browse.ApplyFilter (applyFilter) +import Distribution.Server.Features.Browse.Options (BrowseOptions(..), IsSearch(..)) +import Distribution.Server.Features.Core (CoreFeature(CoreFeature), queryGetPackageIndex, coreResource) +import Distribution.Server.Features.PackageList (ListFeature(ListFeature), makeItemList) +import Distribution.Server.Features.Search (SearchFeature(SearchFeature), searchPackages) +import Distribution.Server.Features.Tags (TagsFeature(TagsFeature), tagsResource) +import Distribution.Server.Features.Users (UserFeature(UserFeature), userResource) +import Distribution.Server.Framework.Error (ErrorResponse(ErrorResponse)) +import Distribution.Server.Framework.Feature (HackageFeature(..), emptyHackageFeature) +import Distribution.Server.Framework.Resource (Resource(..), resourceAt) +import Distribution.Server.Framework.ServerEnv (ServerEnv(..)) +import qualified Distribution.Server.Pages.Index as Pages + +import Happstack.Server.Monads (ServerPartT) +import Happstack.Server.Response (ToMessage(toResponse)) +import Happstack.Server.RqData (lookBS) +import Happstack.Server.Types (Method(POST), 
Response) + +type BrowseFeature = + CoreFeature + -> UserFeature + -> TagsFeature + -> ListFeature + -> SearchFeature + -> IO HackageFeature + +initNewBrowseFeature :: ServerEnv -> IO BrowseFeature +initNewBrowseFeature _env = + pure \coreFeature userFeature tagsFeature listFeature searchFeature -> + pure $ + (emptyHackageFeature "json") + { featureResources = + [ (resourceAt "/newpkglist") + { resourceDesc = + [ (POST, "Browse and search using a BrowseOptions structure in multipart/form-data encoding") + ] + , resourcePost = + [ ("json" + , \_ -> getNewPkgList coreFeature userFeature tagsFeature listFeature searchFeature + ) + ] + } + ] + , featureState = [] + } + +data PaginationConfig = PaginationConfig + { totalNumberOfElements :: Int + , pageNumber :: Int + } + deriving Show + +newtype NumElems = NumElems Int + deriving (Eq, Show) +newtype StartIndex = StartIndex Int + deriving (Eq, Show) + +paginate :: PaginationConfig -> Maybe (StartIndex, NumElems) +paginate PaginationConfig{totalNumberOfElements, pageNumber} = do + let pageSize = 50 -- make sure it is kept in sync with frontend + startIndex <- + if totalNumberOfElements <= pageNumber * pageSize + then + -- We don't want to claim that the page 0 is ever out of bounds, + -- since it is normal to request page 0 of a listing with 0 results. 
+ if pageNumber == 0 + then Just 0 + else Nothing + else Just $ pageNumber * pageSize + Just + ( StartIndex startIndex + , NumElems $ + if totalNumberOfElements < startIndex + pageSize + then totalNumberOfElements - startIndex + else pageSize + ) + +getNewPkgList :: CoreFeature -> UserFeature -> TagsFeature -> ListFeature -> SearchFeature -> ServerPartT (ExceptT ErrorResponse IO) Response +getNewPkgList CoreFeature{queryGetPackageIndex, coreResource} UserFeature{userResource} TagsFeature{tagsResource} ListFeature{makeItemList} SearchFeature{searchPackages} = do + browseOptionsBS <- lookBS "browseOptions" + browseOptions <- lift (parseBrowseOptions browseOptionsBS) + (isSearch, packageNames) <- + case boSearchTerms browseOptions of + [] -> (IsNotSearch,) <$> Pages.toPackageNames <$> queryGetPackageIndex + terms -> (IsSearch,) <$> liftIO (searchPackages terms) + pkgDetails <- liftIO (makeItemList packageNames) + now <- liftIO getCurrentTime + let listOfPkgs = applyFilter now isSearch coreResource userResource tagsResource browseOptions pkgDetails + config = + PaginationConfig + { totalNumberOfElements = length listOfPkgs + , pageNumber = fromIntegral $ boPage browseOptions + } + (StartIndex startIndex, NumElems numElems) <- + lift $ maybe + (throwError . badRespFromString $ "Invalid page number: " ++ show config) + pure + (paginate config) + let pageContents = V.slice startIndex numElems (V.fromList listOfPkgs) + pure . 
toResponse $ + object + [ T.pack "pageContents" .= Array pageContents + , T.pack "numberOfResults" .= toJSON (length listOfPkgs) + ] + +parseBrowseOptions :: ByteString -> ExceptT ErrorResponse IO BrowseOptions +parseBrowseOptions browseOptionsBS = except eiDecoded + where + eiDecoded :: Either ErrorResponse BrowseOptions + eiDecoded = left badRespFromString (eitherDecode browseOptionsBS) + +badRespFromString :: String -> ErrorResponse +badRespFromString err = ErrorResponse 400 [] err [] diff --git a/src/Distribution/Server/Features/Browse/ApplyFilter.hs b/src/Distribution/Server/Features/Browse/ApplyFilter.hs new file mode 100644 index 000000000..7f60be229 --- /dev/null +++ b/src/Distribution/Server/Features/Browse/ApplyFilter.hs @@ -0,0 +1,106 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns, TypeApplications #-} +module Distribution.Server.Features.Browse.ApplyFilter (applyFilter) where + +import Data.List (sortBy) +import Data.Ord (comparing) +import Data.Time.Clock (UTCTime(utctDay), diffUTCTime) +import Data.Time.Format.ISO8601 (iso8601Show) + +import Data.Aeson (Value, (.=), object) +import qualified Data.Text as T +import qualified Data.Set as S + +import Distribution.Server.Features.Browse.Options (BrowseOptions(..), Direction(..), Column(..), Sort(..), NormalColumn(..), IsSearch(..)) +import Distribution.Server.Features.Browse.Parsers (DeprecatedOption(..), Filter(..), operatorToFunction) +import Distribution.Server.Features.Core (CoreResource, corePackageNameUri) +import Distribution.Server.Features.PackageList(PackageItem(..)) +import Distribution.Server.Features.Tags (Tag(..), TagsResource, tagUri) +import Distribution.Server.Features.Users (UserResource, userPageUri) +import Distribution.Server.Users.Types (UserName) +import Distribution.Text (display) + +applyFilter :: UTCTime -> IsSearch -> CoreResource -> UserResource -> TagsResource -> BrowseOptions -> [PackageItem] -> [Value] +applyFilter now isSearch coreResource userResource tagsResource 
browseOptions items = + map packageIndexInfoToValue $ + sort isSearch (boSort browseOptions) $ + filterPackages now (boFilters browseOptions) $ + items + where + packageIndexInfoToValue :: PackageItem -> Value + packageIndexInfoToValue PackageItem{..} = + object + [ T.pack "name" .= renderPackage itemName + , T.pack "downloads" .= itemDownloads + , T.pack "votes" .= itemVotes + , T.pack "description" .= itemDesc + , T.pack "tags" .= map renderTag (S.toAscList itemTags) + , T.pack "lastUpload" .= iso8601Show itemLastUpload + , T.pack "maintainers" .= map renderUser itemMaintainer + ] + renderTag :: Tag -> Value + renderTag tag = + object + [ T.pack "uri" .= tagUri tagsResource "" tag + , T.pack "display" .= display tag + ] + renderUser :: UserName -> Value + renderUser user = + object + [ T.pack "uri" .= userPageUri userResource "" user + , T.pack "display" .= display user + ] + renderPackage pkg = + object + [ T.pack "uri" .= corePackageNameUri coreResource "" pkg + , T.pack "display" .= display pkg + ] + +sort :: IsSearch -> Sort -> [PackageItem] -> [PackageItem] +sort isSearch Sort {sortColumn, sortDirection} = + case sortColumn of + DefaultColumn -> + case isSearch of + IsSearch -> id + IsNotSearch -> id + NormalColumn normalColumn -> + let + comparer = + case normalColumn of + Name -> comparing itemName + Downloads -> comparing itemDownloads + Rating -> comparing itemVotes + Description -> comparing itemDesc + Tags -> comparing (S.toAscList . 
itemTags) + LastUpload -> comparing itemLastUpload + Maintainers -> comparing itemMaintainer + in sortBy (maybeReverse comparer) + where + maybeReverse = + case sortDirection of + Ascending -> id + Descending -> flip + +includeItem :: UTCTime -> PackageItem -> Filter -> Bool +includeItem _ PackageItem{ itemDownloads } (DownloadsFilter ( op, sndParam)) = operatorToFunction op (fromIntegral @Int @Word itemDownloads) sndParam +includeItem _ PackageItem{ itemVotes } (RatingFilter (op, sndParam) ) = operatorToFunction op itemVotes sndParam +includeItem _ PackageItem{ itemLastUpload } (LastUploadFilter (op, sndParam)) = operatorToFunction op (utctDay itemLastUpload) sndParam +includeItem _ PackageItem{ itemTags } (TagFilter tagStr) = any (\tag -> display tag == tagStr) itemTags +includeItem _ PackageItem{ itemMaintainer } (MaintainerFilter maintainerStr) = any (\user -> display user == maintainerStr) itemMaintainer +includeItem now PackageItem{ itemLastUpload } (AgeLastULFilter (op, sndParam)) = operatorToFunction op (diffUTCTime now itemLastUpload) sndParam +includeItem _ PackageItem{ itemDeprecated } (DeprecatedFilter OnlyDeprecated) = not (null itemDeprecated) +includeItem _ PackageItem{ itemDeprecated } (DeprecatedFilter ExcludeDeprecated) = null itemDeprecated +includeItem _ _ (DeprecatedFilter Don'tCareAboutDeprecated) = True +includeItem now packageItem (Not filt) = not (includeItem now packageItem filt) + +filterPackages :: UTCTime -> [Filter] -> [PackageItem] -> [PackageItem] +filterPackages now filtersWithoutDefaults = filter filterForItem + where + filterForItem :: PackageItem -> Bool + filterForItem item = all (includeItem now item) filtersWithDefaults + filtersWithDefaults = + -- The lack of other filters means we don't care. + -- But deprecated packages are excluded by default. + -- So we check if the user has overriden the default filter. 
+ case [ x | DeprecatedFilter x <- filtersWithoutDefaults ] of + [] -> DeprecatedFilter ExcludeDeprecated : filtersWithoutDefaults + _ -> filtersWithoutDefaults diff --git a/src/Distribution/Server/Features/Browse/Options.hs b/src/Distribution/Server/Features/Browse/Options.hs new file mode 100644 index 000000000..c894104b0 --- /dev/null +++ b/src/Distribution/Server/Features/Browse/Options.hs @@ -0,0 +1,78 @@ +{-# LANGUAGE BlockArguments, LambdaCase, OverloadedStrings #-} +module Distribution.Server.Features.Browse.Options (BrowseOptions(..), Column(..), Direction(..), IsSearch(..), NormalColumn(..), Sort(..)) where + +import Data.Aeson ((.:), FromJSON(parseJSON), withObject, withText) +import Data.Attoparsec.Text (parseOnly) +import qualified Data.Text as T + +import Distribution.Server.Features.Browse.Parsers (Filter, conditions, condsToFiltersAndTerms) + +data IsSearch = IsSearch | IsNotSearch + +data NormalColumn = Name | Downloads | Rating | Description | Tags | LastUpload | Maintainers + deriving (Show, Eq) + +data Column = DefaultColumn | NormalColumn NormalColumn + deriving (Show, Eq) + +data Direction = Ascending | Descending + deriving (Show, Eq) + +data Sort = Sort + { sortColumn :: Column + , sortDirection :: Direction + } + deriving (Show, Eq) + +data BrowseOptions = BrowseOptions + { boPage :: Word + , boSort :: Sort + , boFilters :: [Filter] + , boSearchTerms :: [String] + } + +instance FromJSON Column where + parseJSON = + withText "Column" + \case + "default" -> pure DefaultColumn + "name" -> pure $ NormalColumn Name + "downloads" -> pure $ NormalColumn Downloads + "rating" -> pure $ NormalColumn Rating + "description" -> pure $ NormalColumn Description + "tags" -> pure $ NormalColumn Tags + "lastUpload" -> pure $ NormalColumn LastUpload + "maintainers" -> pure $ NormalColumn Maintainers + t -> fail $ "Column invalid: " ++ T.unpack t + +instance FromJSON Direction where + parseJSON = + withText "Direction" + \case + "ascending" -> pure Ascending + 
"descending" -> pure Descending + t -> fail $ "Direction invalid: " ++ T.unpack t + +instance FromJSON Sort where + parseJSON = withObject "Sort" \o -> + Sort + <$> o .: "column" + <*> o .: "direction" + +parse :: MonadFail m => T.Text -> m ([Filter], [String]) +parse searchQuery = do + -- Search query parsing should never fail + Right conds <- pure (parseOnly conditions searchQuery) + pure (condsToFiltersAndTerms conds) + +instance FromJSON BrowseOptions where + parseJSON = withObject "BrowseOptions" \o -> do + (page, sort, searchQuery) <- + (,,) + <$> o .: "page" + <*> o .: "sort" + <*> o .: "searchQuery" + -- The use of monad here won't make us suffer from + -- sequentiality since the parse should never fail. + (filters, terms) <- parse searchQuery + pure (BrowseOptions page sort filters terms) diff --git a/src/Distribution/Server/Features/Browse/Parsers.hs b/src/Distribution/Server/Features/Browse/Parsers.hs new file mode 100644 index 000000000..eb984cf6a --- /dev/null +++ b/src/Distribution/Server/Features/Browse/Parsers.hs @@ -0,0 +1,161 @@ +{-# LANGUAGE BlockArguments, OverloadedStrings, TupleSections #-} +module Distribution.Server.Features.Browse.Parsers + ( Condition(..) + , DeprecatedOption(..) + , Filter(..) + , Operator(..) 
+ , conditions + , condsToFiltersAndTerms + , filterOrSearchTerms + , operatorToFunction + , searchTerms + ) where + +import Prelude hiding (Ordering(..), filter) +import Control.Applicative ((<|>)) +import Control.Monad (guard, join) +import Data.Foldable (asum) +import Data.Time (Day, NominalDiffTime, nominalDay) +import GHC.Float (double2Float) + +import Data.Attoparsec.Text +import Data.Attoparsec.Time (day) +import Data.Text (Text) + +data DeprecatedOption + = OnlyDeprecated + | ExcludeDeprecated + | Don'tCareAboutDeprecated + deriving (Show, Eq) + +data Filter + = DownloadsFilter (Operator, Word) + | RatingFilter (Operator, Float) + | LastUploadFilter (Operator, Day) + | AgeLastULFilter (Operator, NominalDiffTime) + | TagFilter String + | MaintainerFilter String + | DeprecatedFilter DeprecatedOption + | Not Filter + deriving (Show, Eq) + +data Operator = LT | LTE | GT | GTE | EQ | NEQ + deriving (Show, Eq) + +deprecatedOption :: Parser DeprecatedOption +deprecatedOption = + asum + [ "any" *> pure Don'tCareAboutDeprecated + , ("false" <|> "no") *> pure ExcludeDeprecated + , ("true" <|> "yes") *> pure OnlyDeprecated + ] + + +operatorToFunction :: Ord a => Operator -> a -> (a -> Bool) +operatorToFunction LT a = (a <) +operatorToFunction LTE a = (a <=) +operatorToFunction GT a = (a >) +operatorToFunction GTE a = (a >=) +operatorToFunction EQ a = (a ==) +operatorToFunction NEQ a = (a /=) + +data Condition = FilterCond Filter | SearchTermCond String + deriving (Show, Eq) + +condsToFiltersAndTerms :: [Condition] -> ([Filter], [String]) +condsToFiltersAndTerms conds = + ([x | FilterCond x <- conds], [x | SearchTermCond x <- conds]) + +opAndSndParam :: Ord a => Parser a -> Parser (Operator, a) +opAndSndParam parser = do + let mkParser op = skipSpace *> fmap (op,) parser + lt = "<" *> mkParser LT + gt = ">" *> mkParser GT + gte = ">=" *> mkParser GTE + lte = "<=" *> mkParser LTE + eq = "=" *> mkParser EQ + longEq = "==" *> mkParser EQ + neq = "/=" *> mkParser NEQ + 
cStyleNeq = "!=" *> mkParser NEQ + in asum [lt, gt, gte, lte, eq, longEq, neq, cStyleNeq] + +allowedAfterOpeningBrace :: AllowNot -> Parser Text +allowedAfterOpeningBrace AllowNot = "not " <|> allowedAfterOpeningBrace DisallowNot +allowedAfterOpeningBrace _ = + asum + [ "downloads", "rating", "lastUpload" , "ageOfLastUpload" + , "tag:", "maintainer:", "deprecated:" + ] + +-- Whether the 'not' operator can be used. +-- (used to prevent recursive parsing) +data AllowNot = AllowNot | DisallowNot + +filterWith :: AllowNot -> Parser Filter +filterWith allowNot = do + fieldName <- allowedAfterOpeningBrace allowNot + if fieldName == "not " + then Not <$> filterWith DisallowNot + else do + skipSpace + let nonNegativeFloat :: Parser Float + nonNegativeFloat = do + float <- double2Float <$> double + guard $ float >= 0 + pure float + filt <- case fieldName of + "downloads" -> DownloadsFilter <$> opAndSndParam decimal + "rating" -> RatingFilter <$> opAndSndParam nonNegativeFloat + "lastUpload" -> LastUploadFilter <$> opAndSndParam day + "ageOfLastUpload" -> AgeLastULFilter <$> opAndSndParam nominalDiffTime + "tag:" -> TagFilter <$> wordWoSpaceOrParens + "maintainer:" -> MaintainerFilter <$> wordWoSpaceOrParens + "deprecated:" -> DeprecatedFilter <$> deprecatedOption + _ -> fail "Impossible since fieldName possibilities are known at compile time" + pure filt + +filter :: Parser [Condition] +filter = do + filt <- filterWith AllowNot + pure [FilterCond filt] + +filterOrSearchTerms :: Parser [Condition] +filterOrSearchTerms = + asum + [ do + _ <- "(" + skipSpace + filt <- filter <|> searchTerms <|> pure [] + skipSpace + _ <- ")" + pure filt + , searchTerms + ] + +searchTerms :: Parser [Condition] +searchTerms = sepBy1 searchTerm (many1 space) + +-- The search engine accepts terms with spaces or parenthesis in them also but +-- we do not allow that, just to keep this parser simple. 
+searchTerm :: Parser Condition +searchTerm = fmap SearchTermCond wordWoSpaceOrParens + +wordWoSpaceOrParens :: Parser String +wordWoSpaceOrParens = many1 . satisfy $ notInClass " ()" + +conditions :: Parser [Condition] +conditions = fmap join . many' $ skipSpace *> filterOrSearchTerms + +nominalDiffTime :: Parser NominalDiffTime +nominalDiffTime = do + num <- double + guard (num > 0) + skipSpace + lengthSpecifier <- "d" <|> "w" <|> "m" <|> "y" + let days = realToFrac num * nominalDay + case lengthSpecifier of + "d" -> pure days + "w" -> pure (days * 7) + "m" -> pure (days * 30.437) -- Average month length + "y" -> pure (days * 365.25) -- Average year length + _ -> fail "Impossible since lengthSpecifier possibilities are known at compile time" diff --git a/src/Distribution/Server/Features/Html.hs b/src/Distribution/Server/Features/Html.hs index 566a222db..9722a9c46 100644 --- a/src/Distribution/Server/Features/Html.hs +++ b/src/Distribution/Server/Features/Html.hs @@ -133,6 +133,7 @@ initHtmlFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode, , "tag-edit.html" , "candidate-page.html" , "candidate-index.html" + , "new-browse.html" ] @@ -552,6 +553,10 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore} resourceDesc = [(GET, "Show browsable list of all packages")] , resourceGet = [("html", serveBrowsePage)] } + , (resourceAt "/packages/new_browse" ) { + resourceDesc = [(GET, "Show browsable list of all packages")] + , resourceGet = [("html", serveNewBrowsePage)] + } , (extendResource $ corePackagesPage cores) { resourceDesc = [(GET, "Show package index")] , resourceGet = [("html", const $ readAsyncCache cachePackagesPage)] @@ -574,6 +579,11 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore} [ "heading" $= "All packages" , templateUnescaped "tabledata" tabledata] + serveNewBrowsePage :: DynamicPath -> ServerPartE Response + serveNewBrowsePage _dpath = do + template <- getTemplate templates "new-browse.html" + return $ toResponse $ template + [ 
"heading" $= "Browse and search packages" ] -- Currently the main package page is thrown together by querying a bunch -- of features about their attributes for the given package. It'll need diff --git a/tests/BrowseQueryParserTest.hs b/tests/BrowseQueryParserTest.hs new file mode 100644 index 000000000..cf1c88338 --- /dev/null +++ b/tests/BrowseQueryParserTest.hs @@ -0,0 +1,73 @@ +{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, BlockArguments #-} +module Main where + +import Prelude hiding (Ordering(..)) + +import Control.Monad.State.Lazy +import Data.Attoparsec.Text +import Data.Text (Text) +import Data.Time (fromGregorian, nominalDay) +import System.Exit (die) + +import Distribution.Server.Features.Browse.Parsers + +assertEqual :: forall a b. (Eq a, Eq b, Show a, Show b) => Either a b -> b -> StateT Int IO () +assertEqual actual onlyRight = do + let reference :: Either a b + reference = Right onlyRight + if actual /= reference + then do + lift do + putStrLn "Expected" + print reference + putStrLn "But got" + print actual + gotten <- get + lift . 
die $ "Failed test " <> show gotten <> " (zero-indexed)" + else modify (+1) + +assertParses :: Text -> [Condition] -> StateT Int IO () +assertParses searchString = assertEqual (parseOnly conditions searchString) + +main :: IO () +main = do + let inp = " dsa( downloads < 100 )( rating > 5.2) test john (lastUpload /= 2000-02-29)" + ref = + [ SearchTermCond "dsa" + , FilterCond (DownloadsFilter (LT, 100)) + , FilterCond (RatingFilter (GT, 5.2)) + , SearchTermCond "test" + , SearchTermCond "john" + , FilterCond (LastUploadFilter (NEQ, fromGregorian 2000 2 29)) + ] + in flip evalStateT 0 do + assertEqual (parseOnly searchTerms "test donkey") [ SearchTermCond "test", SearchTermCond "donkey" ] + assertEqual (parseOnly filterOrSearchTerms "(test donkey)") [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "(test donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "test donkey" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "test donkey" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "test () donkey" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "test (donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "test1 (donkey1)" [ SearchTermCond "test1", SearchTermCond "donkey1" ] + assertParses "(test donkey)" [ SearchTermCond "test", SearchTermCond "donkey" ] + assertParses "(downloads<=10)" [ FilterCond (DownloadsFilter (LTE, 10)) ] + assertParses "(dl<=10)" [ SearchTermCond "dl<=10" ] + assertParses "(lastUpload!=9999-12-31)" [FilterCond (LastUploadFilter (NEQ, fromGregorian 9999 12 31))] + assertParses "(maintainer:EdwardKmett)" [FilterCond (MaintainerFilter "EdwardKmett")] + assertParses "(maintainer:23skidoo)" [FilterCond (MaintainerFilter "23skidoo")] + assertParses "(tag:network)" [FilterCond (TagFilter "network")] + assertParses "(ageOfLastUpload<5y)" [FilterCond (AgeLastULFilter (LT, nominalDay * 365.25 * 5))] + assertParses "(ageOfLastUpload<0.00001d)" 
[FilterCond (AgeLastULFilter (LT, nominalDay * 0.00001))] + assertParses "(rating<=NaN)" [ SearchTermCond "rating<=NaN" ] + assertParses "(rating<=-1)" [ SearchTermCond "rating<=-1" ] + assertParses "(rating<=-0)" [ FilterCond (RatingFilter (LTE, 0)) ] + assertParses "(downloads<-1)" [ SearchTermCond "downloads<-1" ] + assertParses "(not maintainer:EdwardKmett)" [ FilterCond (Not (MaintainerFilter "EdwardKmett")) ] + assertParses "(not not maintainer:EdwardKmett)" [ SearchTermCond "not", SearchTermCond "not", SearchTermCond "maintainer:EdwardKmett" ] + assertParses "(deprecated:true)" [ FilterCond (DeprecatedFilter OnlyDeprecated) ] + assertParses "(deprecated:yes)" [ FilterCond (DeprecatedFilter OnlyDeprecated) ] + assertParses "(deprecated:false)" [ FilterCond (DeprecatedFilter ExcludeDeprecated) ] + assertParses "(deprecated:no)" [ FilterCond (DeprecatedFilter ExcludeDeprecated) ] + assertParses "(deprecated:any)" [ FilterCond (DeprecatedFilter Don'tCareAboutDeprecated) ] + assertParses "" [] + assertParses inp ref diff --git a/tests/Distribution/Server/Packages/UnpackTest.hs b/tests/Distribution/Server/Packages/UnpackTest.hs index 74f921c10..5db1732f7 100644 --- a/tests/Distribution/Server/Packages/UnpackTest.hs +++ b/tests/Distribution/Server/Packages/UnpackTest.hs @@ -18,9 +18,9 @@ deriving instance Eq Tar.FileNameError deriving instance Eq CombinedTarErrs -- | Test that check permissions does the right thing -testPermissions :: FilePath -> -- ^ .tar.gz file to test - (Tar.Entry -> Maybe CombinedTarErrs) -> -- ^ Converter to create errors if necessary - Assertion +testPermissions :: FilePath -- ^ .tar.gz file to test + -> (Tar.Entry -> Maybe CombinedTarErrs) -- ^ Converter to create errors if necessary + -> Assertion testPermissions tarPath mangler = do entries <- return . Tar.read . GZip.decompress =<< BL.readFile tarPath let mappedEntries = Tar.foldEntries Tar.Next Tar.Done (Tar.Fail . 
FormatError) entries diff --git a/tests/PaginationTest.hs b/tests/PaginationTest.hs new file mode 100644 index 000000000..0e8d48c9e --- /dev/null +++ b/tests/PaginationTest.hs @@ -0,0 +1,37 @@ +module Main where + +import Control.Monad (unless) +import Distribution.Server.Features.Browse (StartIndex(..), NumElems(..), paginate, PaginationConfig(..)) +import System.Exit (die) + +main :: IO () +main = do + let res = paginate $ PaginationConfig 10 0 + unless (res == Just (StartIndex 0, NumElems 10)) $ + die $ "Mismatch 1 " ++ show res + + -- We don't want to claim that the page 0 is ever out of bounds, + -- since it is normal to request page 0 of a listing with 0 results. + let res = paginate $ PaginationConfig 0 0 + unless (res == Just (StartIndex 0, NumElems 0)) $ + die $ "Mismatch 2 " ++ show res + + let res = paginate $ PaginationConfig 10 1 + unless (res == Nothing) $ + die $ "Mismatch 3 " ++ show res + + let res = paginate $ PaginationConfig 11 1 + unless (res == Just (StartIndex 10, NumElems 1)) $ + die $ "Mismatch 4 " ++ show res + + let res = paginate $ PaginationConfig 9 0 + unless (res == Just (StartIndex 0, NumElems 9)) $ + die $ "Mismatch 5 " ++ show res + + let res = paginate $ PaginationConfig 20 0 + unless (res == Just (StartIndex 0, NumElems 10)) $ + die $ "Mismatch 6 " ++ show res + + let res = paginate $ PaginationConfig 20 1 + unless (res == Just (StartIndex 10, NumElems 10)) $ + die $ "Mismatch 7 " ++ show res From d3ac12a4b17b65f25a76f1c8f0182a89be97cfd2 Mon Sep 17 00:00:00 2001 From: Janus Troelsen Date: Mon, 21 Feb 2022 00:18:05 -0600 Subject: [PATCH 2/7] Make new-browse handle old endpoints --- datafiles/static/new-browse.js | 2 +- src/Distribution/Server/Features/Browse.hs | 2 +- src/Distribution/Server/Features/Html.hs | 231 +-------------------- 3 files changed, 6 insertions(+), 229 deletions(-) diff --git a/datafiles/static/new-browse.js b/datafiles/static/new-browse.js index aed8269ca..6bc886f41 100644 --- a/datafiles/static/new-browse.js 
+++ b/datafiles/static/new-browse.js @@ -43,7 +43,7 @@ const get = () => new Promise((resolve,reject) => { , searchQuery: state.searchQuery }; formData.append('browseOptions', JSON.stringify(obj)); - fetch('/newpkglist', {method:'POST', body: formData}).then(async (response) => { + fetch('/packages/search', {method:'POST', body: formData}).then(async (response) => { if (!response.ok) { const el = d.querySelector("#fatalError"); el.style.display = "block"; diff --git a/src/Distribution/Server/Features/Browse.hs b/src/Distribution/Server/Features/Browse.hs index 972342ddd..1be48a099 100644 --- a/src/Distribution/Server/Features/Browse.hs +++ b/src/Distribution/Server/Features/Browse.hs @@ -44,7 +44,7 @@ initNewBrowseFeature _env = pure $ (emptyHackageFeature "json") { featureResources = - [ (resourceAt "/newpkglist") + [ (resourceAt "/packages/search") { resourceDesc = [ (POST, "Browse and search using a BrowseOptions structure in multipart/form-data encoding") ] diff --git a/src/Distribution/Server/Features/Html.hs b/src/Distribution/Server/Features/Html.hs index 9722a9c46..fcf7ac1de 100644 --- a/src/Distribution/Server/Features/Html.hs +++ b/src/Distribution/Server/Features/Html.hs @@ -21,7 +21,6 @@ import Distribution.Server.Features.Users import Distribution.Server.Features.DownloadCount import Distribution.Server.Features.Votes import Distribution.Server.Features.Search -import Distribution.Server.Features.Search as Search import Distribution.Server.Features.PreferredVersions -- [reverse index disabled] import Distribution.Server.Features.ReverseDependencies import Distribution.Server.Features.PackageContents (PackageContentsFeature(..)) @@ -68,9 +67,6 @@ import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Vector as Vec import qualified Data.Text as T -import Data.Array (Array, listArray) -import qualified Data.Array as Array -import qualified Data.Ix as Ix import qualified Data.ByteString.Lazy.Char8 as BS (ByteString, pack) 
import qualified Network.URI as URI @@ -301,7 +297,6 @@ htmlFeature env@ServerEnv{..} candidates templates htmlPreferred = mkHtmlPreferred utilities core versions htmlTags = mkHtmlTags utilities core upload user list tags templates - htmlSearch = mkHtmlSearch utilities core list names cacheBrowseTable templates htmlResources = concat [ htmlCoreResources htmlCore @@ -313,7 +308,6 @@ htmlFeature env@ServerEnv{..} , htmlPreferredResources htmlPreferred , htmlDownloadsResources htmlDownloads , htmlTagsResources htmlTags - , htmlSearchResources htmlSearch -- and user groups. package maintainers, trustees, admins , htmlGroupResource user (maintainersGroupResource . uploadResource $ upload) , htmlGroupResource user (trusteesGroupResource . uploadResource $ upload) @@ -517,7 +511,7 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore} HtmlPreferred{..} cachePackagesPage cacheNamesPage - cacheBrowseTable + _cacheBrowseTable templates SearchFeature{..} PackageCandidatesFeature{..} @@ -553,10 +547,9 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore} resourceDesc = [(GET, "Show browsable list of all packages")] , resourceGet = [("html", serveBrowsePage)] } - , (resourceAt "/packages/new_browse" ) { - resourceDesc = [(GET, "Show browsable list of all packages")] - , resourceGet = [("html", serveNewBrowsePage)] - } + , (extendResource searchPackagesResource) { + resourceGet = [("html", serveBrowsePage)] + } , (extendResource $ corePackagesPage cores) { resourceDesc = [(GET, "Show package index")] , resourceGet = [("html", const $ readAsyncCache cachePackagesPage)] @@ -573,14 +566,6 @@ mkHtmlCore ServerEnv{serverBaseURI, serverBlobStore} serveBrowsePage :: DynamicPath -> ServerPartE Response serveBrowsePage _dpath = do - template <- getTemplate templates "table-interface.html" - tabledata <- readAsyncCache cacheBrowseTable - return $ toResponse $ template - [ "heading" $= "All packages" - , templateUnescaped "tabledata" tabledata] - - serveNewBrowsePage :: DynamicPath -> 
ServerPartE Response - serveNewBrowsePage _dpath = do template <- getTemplate templates "new-browse.html" return $ toResponse $ template [ "heading" $= "Browse and search packages" ] @@ -1812,214 +1797,6 @@ mkHtmlTags HtmlUtilities{..} tagInPath :: forall m a. (MonadPlus m, FromReqURI a) => DynamicPath -> m a tagInPath dpath = maybe mzero return (lookup "tag" dpath >>= fromReqURI) - -{------------------------------------------------------------------------------- - Search --------------------------------------------------------------------------------} - -data HtmlSearch = HtmlSearch { - htmlSearchResources :: [Resource] - } - -mkHtmlSearch :: HtmlUtilities - -> CoreFeature - -> ListFeature - -> SearchFeature - -> AsyncCache BS.ByteString - -> Templates - -> HtmlSearch -mkHtmlSearch HtmlUtilities{..} - CoreFeature{..} - ListFeature{makeItemList} - SearchFeature{..} - cacheBrowseTable - templates = - HtmlSearch{..} - where - htmlSearchResources = [ - (extendResource searchPackagesResource) { - resourceGet = [("html", servePackageFind)] - } - ] - - servePackageFind :: DynamicPath -> ServerPartE Response - servePackageFind _ = do - (mtermsStr, mexplain) <- - queryString $ (,) <$> optional (look "terms") - <*> optional (look "explain") - let explain = isJust mexplain - case mtermsStr of - Just termsStr | explain - , terms <- words termsStr, not (null terms) -> do - params <- queryString getSearchRankParameters - results <- searchPackagesExplain params terms - return $ toResponse $ Resource.XHtml $ - hackagePage "Package search" $ - [ toHtml $ paramsForm params termsStr - , resetParamsForm termsStr - , toHtml $ explainResults results - ] - - Just termsStr | terms <- words termsStr -> do - tabledata <- if null terms - then readAsyncCache cacheBrowseTable - else do - names <- searchPackages terms - pkgDetails <- liftIO $ makeItemList names - let rowList = map makeRow pkgDetails - return . BS.pack . 
showHtmlFragment $ "" +++ rowList - template <- getTemplate templates "table-interface.html" - return $ toResponse $ template - [ "heading" $= toHtml (searchForm termsStr False) - , templateUnescaped "tabledata" tabledata - , "footer" $= alternativeSearchTerms termsStr] - - _ -> - return $ toResponse $ Resource.XHtml $ - hackagePage "Text search" $ - [ toHtml $ searchForm "" explain - , alternativeSearch - ] - where - searchForm termsStr explain = - [ h2 << "Package search" - , form ! [XHtml.method "GET", action "/packages/search"] << - [ input ! [value termsStr, name "terms", identifier "terms"] - , toHtml " " - , input ! [thetype "submit", value "Search"] - , if explain then input ! [thetype "hidden", name "explain"] - else noHtml - ] - ] - - alternativeSearch = - paragraph << - [ toHtml "Alternatively, if you are looking for a particular function then try " - , anchor ! [href hoogleBaseLink] << "Hoogle" - ] - alternativeSearchTerms termsStr = - paragraph << - [ toHtml "Alternatively, if you are looking for a particular function then try " - , anchor ! [href (hoogleLink termsStr)] << "Hoogle" - ] - hoogleBaseLink = "http://www.haskell.org/hoogle/" - hoogleLink termsStr = "http://www.haskell.org/hoogle/?hoogle=" <> termsStr - - explainResults :: (Maybe PackageName, [(Search.Explanation PkgDocField PkgDocFeatures T.Text, PackageName)]) -> [Html] - explainResults (exactMatch, results) = - [ h2 << "Results" - , h3 << "Exact Matches" - , maybe noHtml (toHtml . display) exactMatch - , case results of - [] -> noHtml - ((explanation1, _):_) -> - table ! 
[ border 1 ] << - ( ( tr << tableHeader explanation1) - : [ tr << tableRow explanation pkgname - | (explanation, pkgname) <- results ]) - ] - where - tableHeader Search.Explanation{..} = - [ th << "package", th << "overall score" ] - ++ [ th << (show term ++ " score") - | (term, _score) <- termScores ] - ++ [ th << (show term ++ " " ++ show field ++ " score") - | (term, fieldScores) <- termFieldScores - , (field, _score) <- fieldScores ] - ++ [ th << (show feature ++ " score") - | (feature, _score) <- nonTermScores ] - - tableRow Search.Explanation{..} pkgname = - [ td << display pkgname, td << show overallScore ] - ++ [ td << show score - | (_term, score) <- termScores ] - ++ [ td << show score - | (_term, fieldScores) <- termFieldScores - , (_field, score) <- fieldScores ] - ++ [ td << show score - | (_feature, score) <- nonTermScores ] - - getSearchRankParameters = do - let defaults = defaultSearchRankParameters - k1 <- lookRead "k1" `mplus` pure (paramK1 defaults) - bs <- sequence - [ lookRead ("b" ++ show field) - `mplus` pure (paramB defaults field) - | field <- Ix.range (minBound, maxBound :: PkgDocField) ] - ws <- sequence - [ lookRead ("w" ++ show field) - `mplus` pure (paramFieldWeights defaults field) - | field <- Ix.range (minBound, maxBound :: PkgDocField) ] - fs <- sequence - [ lookRead ("w" ++ show feature) - `mplus` pure (paramFeatureWeights defaults feature) - | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) ] - let barr, warr :: Array PkgDocField Float - barr = listArray (minBound, maxBound) bs - warr = listArray (minBound, maxBound) ws - farr = listArray (minBound, maxBound) fs - return defaults { - paramK1 = k1, - paramB = (barr Array.!), - paramFieldWeights = (warr Array.!), - paramFeatureWeights = (farr Array.!) - } - - paramsForm SearchRankParameters{..} termsStr = - [ h2 << "Package search (tuning & explanation)" - , form ! [XHtml.method "GET", action "/packages/search"] << - [ input ! 
[value termsStr, name "terms", identifier "terms"] - , toHtml " " - , input ! [thetype "submit", value "Search"] - , input ! [thetype "hidden", name "explain"] - , simpleTable [] [] $ - makeInput [thetype "text", value (show paramK1)] "k1" "K1 parameter" - : [ makeInput [thetype "text", value (show (paramB field))] - ("b" ++ fieldname) - ("B param for " ++ fieldname) - ++ makeInput [thetype "text", value (show (paramFieldWeights field)) ] - ("w" ++ fieldname) - ("Weight for " ++ fieldname) - | field <- Ix.range (minBound, maxBound :: PkgDocField) - , let fieldname = show field - ] - ++ [ makeInput [thetype "text", value (show (paramFeatureWeights feature)) ] - ("w" ++ featurename) - ("Weight for " ++ featurename) - | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) - , let featurename = show feature ] - ] - ] - resetParamsForm termsStr = - let SearchRankParameters{..} = defaultSearchRankParameters in - form ! [XHtml.method "GET", action "/packages/search"] << - (concat $ - [ input ! [ thetype "submit", value "Reset parameters" ] - , input ! [ thetype "hidden", name "terms", value termsStr ] - , input ! [ thetype "hidden", name "explain" ] - , input ! [ thetype "hidden", name "k1", value (show paramK1) ] ] - : [ [ input ! [ thetype "hidden" - , name ("b" ++ fieldname) - , value (show (paramB field)) - ] - , input ! [ thetype "hidden" - , name ("w" ++ fieldname) - , value (show (paramFieldWeights field)) - ] - ] - | field <- Ix.range (minBound, maxBound :: PkgDocField) - , let fieldname = show field - ] - ++ [ [ input ! 
[ thetype "hidden" - , name ("w" ++ featurename) - , value (show (paramFeatureWeights feature)) - ] - ] - | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) - , let featurename = show feature - ]) - - {------------------------------------------------------------------------------- Groups -------------------------------------------------------------------------------} From 8cfec3f90032d5865476b50cb597c4a4f20a5a2d Mon Sep 17 00:00:00 2001 From: Janus Troelsen Date: Mon, 21 Feb 2022 15:41:33 -0600 Subject: [PATCH 3/7] Adjust PaginationTest for pageSize=50 --- tests/PaginationTest.hs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/PaginationTest.hs b/tests/PaginationTest.hs index 0e8d48c9e..accd61e16 100644 --- a/tests/PaginationTest.hs +++ b/tests/PaginationTest.hs @@ -1,9 +1,10 @@ module Main where import Control.Monad (unless) -import Distribution.Server.Features.Browse (StartIndex(..), NumElems(..), paginate, PaginationConfig(..)) import System.Exit (die) +import Distribution.Server.Features.Browse (NumElems(..), PaginationConfig(..), StartIndex(..), paginate) + main :: IO () main = do let res = paginate $ PaginationConfig 10 0 @@ -20,18 +21,18 @@ main = do unless (res == Nothing) $ die $ "Mismatch 3 " ++ show res - let res = paginate $ PaginationConfig 11 1 - unless (res == Just (StartIndex 10, NumElems 1)) $ + let res = paginate $ PaginationConfig 51 1 + unless (res == Just (StartIndex 50, NumElems 1)) $ die $ "Mismatch 4 " ++ show res let res = paginate $ PaginationConfig 9 0 unless (res == Just (StartIndex 0, NumElems 9)) $ die $ "Mismatch 5 " ++ show res - let res = paginate $ PaginationConfig 20 0 - unless (res == Just (StartIndex 0, NumElems 10)) $ + let res = paginate $ PaginationConfig 100 0 + unless (res == Just (StartIndex 0, NumElems 50)) $ die $ "Mismatch 6 " ++ show res - let res = paginate $ PaginationConfig 20 1 - unless (res == Just (StartIndex 10, NumElems 10)) $ + let res = paginate $ 
PaginationConfig 100 1 + unless (res == Just (StartIndex 50, NumElems 50)) $ die $ "Mismatch 7 " ++ show res From 7bfa6a2a868955563bfcffaf2d2ecf1a9c97865d Mon Sep 17 00:00:00 2001 From: Janus Troelsen Date: Wed, 23 Feb 2022 14:54:08 -0600 Subject: [PATCH 4/7] Prettier/consistent margins in and around the form --- datafiles/templates/Html/new-browse.html.st | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/datafiles/templates/Html/new-browse.html.st b/datafiles/templates/Html/new-browse.html.st index 1373632de..5d31ae570 100644 --- a/datafiles/templates/Html/new-browse.html.st +++ b/datafiles/templates/Html/new-browse.html.st @@ -7,6 +7,7 @@