diff --git a/client/package-lock.json b/client/package-lock.json
index e3ceee8..598caac 100644
--- a/client/package-lock.json
+++ b/client/package-lock.json
@@ -9454,6 +9454,11 @@
       "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=",
       "dev": true
     },
+    "reselect": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/reselect/-/reselect-3.0.1.tgz",
+      "integrity": "sha1-79qpjqdFEyTQkrKyFjpqHXqaIUc="
+    },
     "resolve": {
       "version": "1.7.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.7.1.tgz",
diff --git a/client/package.json b/client/package.json
index 3451646..76ad217 100644
--- a/client/package.json
+++ b/client/package.json
@@ -45,6 +45,7 @@
     "redux": "^4.0.0",
     "redux-saga": "^0.16.0",
     "redux-saga-thunk": "^0.7.1",
+    "reselect": "^3.0.1",
     "yup": "^0.24.1"
   }
 }
diff --git a/client/src/state/allFolders/_helpers.js b/client/src/state/allFolders/_helpers.js
new file mode 100644
index 0000000..178c4b6
--- /dev/null
+++ b/client/src/state/allFolders/_helpers.js
@@ -0,0 +1,25 @@
+/**
+ * helper function to transform an object of key/value pairs into a query string
+ * to be sent as part of an api call URL
+ * @param {Object} params object of query parameters
+ */
+export const concatParams = function(params) {
+  const paramsArr = Object.keys(params).reduce((acc, val) => {
+    // if the value is an array, add an entry for each option whose `checked` flag is true
+    if(Array.isArray(params[val])) {
+      const valArr = params[val];
+      valArr.forEach(option => {
+        if(option.checked) acc.push(val.concat('=', option.label));
+      });
+
+      return acc;
+    } else if(params[val] !== undefined && params[val] !== null) {
+      acc.push(val.concat('=', params[val]));
+      return acc;
+    } else {
+      return acc;
+    }
+  }, [])
+
+  return paramsArr.join('&');
+}
diff --git a/client/src/state/allFolders/actions.js b/client/src/state/allFolders/actions.js
new file mode 100644
index 0000000..7f30050
--- /dev/null
+++ b/client/src/state/allFolders/actions.js
@@ -0,0 +1,50 @@
+import * as types from './types';
+
+export const foldersFetchRequest = function(payload) {
+  return {
+    type: types.FOLDERS_FETCH_REQUEST,
+    payload
+  }
+}
+
+export const foldersFetchSuccess = function(payload) {
+  return {
+    type: types.FOLDERS_FETCH_SUCCESS,
+    payload: payload
+  }
+}
+
+export const foldersFetchFailure = function(error) {
+  return {
+    type: types.FOLDERS_FETCH_FAILURE,
+    payload: error
+  }
+}
+
+export const foldersFetchExit = function() {
+  return {
+    type: types.FOLDERS_FETCH_EXIT,
+  }
+}
+
+export const foldersSetCurrentPage = function(page) {
+  return {
+    type: types.FOLDERS_SET_CURRENT_PAGE,
+    payload: page
+  }
+}
+
+export const foldersClearPageCache = function() {
+  return {
+    type: types.FOLDERS_CLEAR_PAGE_CACHE
+  }
+}
+
+// this action does the same thing as foldersFetchRequest, but the saga debounces it
+// (see FILTER_DEBOUNCE_DELAY in sagas.js) to account for the user typing
+export const foldersFilterRequest = function(payload) {
+  return {
+    type: types.FOLDERS_FILTER_REQUEST,
+    payload
+  }
+}
\ No newline at end of file
diff --git a/client/src/state/allFolders/api.js b/client/src/state/allFolders/api.js
new file mode 100644
index 0000000..6a134b3
--- /dev/null
+++ b/client/src/state/allFolders/api.js
@@ -0,0 +1,46 @@
+import axios from 'axios';
+import * as helpers from './_helpers';
+axios.defaults.withCredentials = true;
+
+const BASE_URL = 'http://localhost:3000/api/'
+const LIMIT = 25;
+
+const foldersFetch = async (params) => {
+  try {
+    const { page, sortKey, sortDirection, searchFilter, optionsFilter } = params;
+
+    const queryString = helpers.concatParams({
+      page: page,
+      limit: LIMIT,
+      sortBy: sortKey,
+      order: sortDirection,
+      q: searchFilter
+      // folder_type: optionsFilter, can be set later via checkbox options
+    });
+
+    console.log(queryString);
+    const url = BASE_URL.concat('folder', '?', queryString);
+    const response = await axios.get(url);
+    // const totalCount = response.headers['x-total-count'];
+    const totalCount = response.data.data.total;
+    const lastPage = response.data.data.lastPage;
+    const results = response.data.data.results;
+
+    // FIXME: the problem is if you make an api call for a page > last page in the api,
+    // the api will just return an empty data set but it will still be a 200 response code.
+    return {
+      data: results,
+      lastPage: lastPage,
+      currentPage: page
+    }
+  } catch(err) {
+    // TODO: add better error handling for "network error": i.e. when server is not online at all
+    // https://github.com/axios/axios#handling-errors
+    console.log(err);
+    throw err.response || err.message;
+  }
+}
+
+export default {
+  foldersFetch
+}
\ No newline at end of file
diff --git a/client/src/state/allFolders/index.js b/client/src/state/allFolders/index.js
new file mode 100644
index 0000000..e5b33ee
--- /dev/null
+++ b/client/src/state/allFolders/index.js
@@ -0,0 +1,15 @@
+import reducer from './reducers';
+import * as types from './types';
+import * as actions from './actions';
+import * as selectors from './selectors';
+import { sagas } from './sagas';
+
+export {
+  types,
+  actions,
+  sagas,
+  selectors
+}
+
+export default reducer;
+
diff --git a/client/src/state/allFolders/reducers.js b/client/src/state/allFolders/reducers.js
new file mode 100644
index 0000000..ecb14eb
--- /dev/null
+++ b/client/src/state/allFolders/reducers.js
@@ -0,0 +1,104 @@
+import * as types from './types';
+import { combineReducers } from 'redux';
+
+const isLoading = (state = false, action) => {
+  switch (action.type) {
+    case types.FOLDERS_FETCH_REQUEST:
+      return true;
+    case types.FOLDERS_FETCH_SUCCESS:
+      return false;
+    case types.FOLDERS_FETCH_FAILURE:
+      return false;
+    case types.FOLDERS_FETCH_EXIT: // used to prematurely exit fetch if folders are already cached
+      return false;
+    default:
+      return state;
+  }
+}
+
+const error = (state = null, action) => {
+  switch(action.type) {
+    case types.FOLDERS_FETCH_REQUEST:
+      return null;
+    case types.FOLDERS_FETCH_FAILURE:
+      return action.payload
+    default:
+      return state;
+  }
+}
+
+// object containing all the folder items, keyed by folder id
+const byId = (state = {}, action) => {
+  console.log(action.payload);
+  const foldersList = {};
+  switch(action.type) {
+    case types.FOLDERS_FETCH_SUCCESS:
+      for(let i = 0; i < action.payload.data.length; i++) {
+        const folderItem = action.payload.data[i];
+        foldersList[folderItem.id] = folderItem;
+      }
+      return {
+        ...state,
+        ...foldersList
+      }
+    default:
+      return state;
+  }
+}
+
+// array of only the item ids
+const allIds = (state = [], action) => {
+  switch(action.type) {
+    case types.FOLDERS_FETCH_SUCCESS:
+      return [
+        ...state,
+        ...action.payload.data.map((item) => {
+          return item.id;
+        })
+      ]
+    default:
+      return state;
+  }
+}
+
+const currentPage = (state = 1, action) => {
+  switch(action.type) {
+    case types.FOLDERS_SET_CURRENT_PAGE:
+      return action.payload;
+    default:
+      return state;
+  }
+}
+
+const lastPage = (state = 0, action) => {
+  switch(action.type) {
+    case types.FOLDERS_FETCH_SUCCESS:
+      return action.payload.lastPage
+    default:
+      return state;
+  }
+}
+
+const pages = (state = {}, action) => {
+  switch(action.type) {
+    case types.FOLDERS_FETCH_SUCCESS:
+      return {
+        ...state,
+        [action.payload.currentPage]: action.payload.data.map((item) => item.id)
+      }
+    case types.FOLDERS_CLEAR_PAGE_CACHE:
+      return {}
+    default:
+      return state;
+  }
+}
+
+export default combineReducers({
+  error,
+  isLoading,
+  byId,
+  allIds,
+  currentPage,
+  lastPage,
+  pages
+});
\ No newline at end of file
diff --git a/client/src/state/allFolders/sagas.js b/client/src/state/allFolders/sagas.js
new file mode 100644
index 0000000..49ae790
--- /dev/null
+++ b/client/src/state/allFolders/sagas.js
@@ -0,0 +1,75 @@
+import { delay } from 'redux-saga';
+import { put, takeLatest, takeEvery, all, call, fork, select } from 'redux-saga/effects'
+// import { push } from 'react-router-redux';
+
+import * as actions from './actions';
+import * as types from './types';
+import * as selectors from './selectors'
+import api from './api';
+
+const FILTER_DEBOUNCE_DELAY = 1000;
+
+
+export function* watchFolderFilterRequest() {
+  yield takeLatest(types.FOLDERS_FILTER_REQUEST, handleFilterRequest);
+}
+
+export function* handleFilterRequest(action) {
+  yield delay(FILTER_DEBOUNCE_DELAY);
+
+  // execute folder fetch after debounce delay
+  yield put(actions.foldersFetchRequest({
+    ...action.payload,
+    page: 1 // manually set page to 1 as filtered results can have fewer pages than the current query
+  }));
+}
+
+export function* watchFolderFetchRequest() {
+  yield takeLatest(types.FOLDERS_FETCH_REQUEST, foldersFetch);
+}
+
+export function* foldersFetch(action) {
+  const { page, sortKey, sortDirection, clearCache=false, searchFilter=null, optionsFilter } = action.payload;
+
+  try {
+    if(clearCache) {
+      yield put(actions.foldersClearPageCache()); // clear page cache to set new results
+    }
+    const currentPage = yield select(selectors.getCurrentPage);
+    const pageParam = page ? page : currentPage; // get current page from payload, or from the store if not provided
+    const isPageCached = yield select(selectors.getIsFolderCached, pageParam);
+
+
+    // check if the page is already cached before making the api call
+    if(!isPageCached || clearCache) {
+      // TODO: check if the page is cached on sort/filter
+      const data = yield call(api.foldersFetch, {
+        page: pageParam,
+        sortKey,
+        sortDirection,
+        searchFilter,
+        optionsFilter
+      });
+      yield put(actions.foldersSetCurrentPage(data.currentPage)); // set current page
+      yield put(actions.foldersFetchSuccess(data));
+    } else {
+      yield put(actions.foldersSetCurrentPage(pageParam)); // set current page
+      yield put(actions.foldersFetchExit());
+    }
+
+  } catch(err) {
+    // TODO: better error handling
+    console.log('err');
+    console.log(err);
+    yield put(actions.foldersFetchFailure(err));
+  }
+}
+
+// export only watcher sagas in one variable
+export const sagas = [
+  watchFolderFetchRequest,
+  watchFolderFilterRequest
+];
+
+
+
diff --git a/client/src/state/allFolders/selectors.js b/client/src/state/allFolders/selectors.js
new file mode 100644
index 0000000..38adc79
--- /dev/null
+++ b/client/src/state/allFolders/selectors.js
@@ -0,0 +1,37 @@
+// SELECTORS
+import { createSelector } from 'reselect';
+
+
+// INPUT SELECTORS
+export const getCurrentPage = (state) => state.allFolders.currentPage;
+export const getIsFolderCached = (state, page) => state.allFolders.pages[page] !== undefined;
+const getFoldersById = (state) => state.allFolders.byId;
+const getFoldersAllIds = (state) => state.allFolders.allIds;
+const getFolderIdsByPage = state => {
+  const page = state.allFolders.currentPage;
+  const pageIds = state.allFolders.pages[page];
+
+  // check if the page ids are already cached or not
+  if (pageIds === undefined) {
+    return [];
+  } else {
+    return pageIds
+  }
+}
+
+// SELECTORS
+// get all folders by mapping the array of ids onto the object containing
+// all folders keyed by id
+export const getAllFolders = createSelector(
+  [getFoldersById, getFoldersAllIds],
+  (foldersById, foldersAllIds) => {
+    return foldersAllIds.map((allIdsKey) => foldersById[allIdsKey]);
+  }
+)
+
+export const getAllFoldersOfCurrentPage = createSelector(
+  [getFoldersById, getFolderIdsByPage],
+  (aById, aIdsByPage) => {
+    return aIdsByPage.map((pageIdKey) => aById[pageIdKey]);
+  }
+)
\ No newline at end of file
diff --git a/client/src/state/allFolders/types.js b/client/src/state/allFolders/types.js
new file mode 100644
index 0000000..d86e5c3
--- /dev/null
+++ b/client/src/state/allFolders/types.js
@@ -0,0 +1,8 @@
+export const FOLDERS_FETCH_REQUEST = 'FOLDERS_FETCH_REQUEST';
+export const FOLDERS_FETCH_SUCCESS = 'FOLDERS_FETCH_SUCCESS';
+export const FOLDERS_FETCH_FAILURE = 'FOLDERS_FETCH_FAILURE';
+export const FOLDERS_FETCH_EXIT = 'FOLDERS_FETCH_EXIT';
+export const FOLDERS_SET_CURRENT_PAGE = 'FOLDERS_SET_CURRENT_PAGE';
+export const FOLDERS_FILTER_REQUEST = 'FOLDERS_FILTER_REQUEST';
+
+export const FOLDERS_CLEAR_PAGE_CACHE = 'FOLDERS_CLEAR_PAGE_CACHE';
\ No newline at end of file
diff --git a/client/src/state/rootReducer.js b/client/src/state/rootReducer.js
index b00f06b..859d9b6 100644
--- a/client/src/state/rootReducer.js
+++ b/client/src/state/rootReducer.js
@@ -1,6 +1,7 @@
 import { combineReducers } from 'redux';
 
 import auth from './authentication';
+import allFolders from './allFolders';
 
 // allow redux to manage browser history
 import { routerReducer } from 'react-router-redux';
@@ -8,5 +9,6 @@ import { routerReducer } from 'react-router-redux';
 
 export default combineReducers({
   auth,
+  allFolders,
   router: routerReducer
 });
\ No newline at end of file
diff --git a/client/src/state/rootSaga.js b/client/src/state/rootSaga.js
index 419fcb9..ebdf7c1 100644
--- a/client/src/state/rootSaga.js
+++ b/client/src/state/rootSaga.js
@@ -1,9 +1,11 @@
 import { sagas as authenticationSagas } from './authentication';
+import { sagas as allFoldersSagas } from './allFolders';
 
 import { takeEvery, takeLatest, fork, all } from 'redux-saga/effects';
 
 
 const allSagas = [
-  ...authenticationSagas
+  ...authenticationSagas,
+  ...allFoldersSagas
 ]
 
diff --git a/client/src/views/containers/FoldersList/Folder.jsx b/client/src/views/containers/FoldersList/Folder.jsx
new file mode 100644
index 0000000..9e373f1
--- /dev/null
+++ b/client/src/views/containers/FoldersList/Folder.jsx
@@ -0,0 +1,22 @@
+import React from 'react';
+import PT from 'prop-types';
+
+const Folder = ({ id, title, description, author }) => {
+  return (
+    <tr>
+      {
+        [id, title, description, author].map((columnItem) => {
+          return (<td>{columnItem}</td>);
+        })
+      }
+    </tr>
+  );
+}
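
For reference, a minimal sketch of how a container could consume the new allFolders module. The FoldersList container is not part of this diff, so the component below is hypothetical: it assumes the app already wires components through react-redux's connect, and the 'title'/'asc' sort values are placeholder parameters.

import React from 'react';
import { connect } from 'react-redux';
import { actions, selectors } from '../../../state/allFolders';
import Folder from './Folder';

// hypothetical container: requests the first page on mount and renders the cached current page
class FoldersList extends React.Component {
  componentDidMount() {
    // dispatches FOLDERS_FETCH_REQUEST; already-cached pages short-circuit via FOLDERS_FETCH_EXIT
    this.props.fetchFolders({ page: 1, sortKey: 'title', sortDirection: 'asc' });
  }

  render() {
    return (
      <table>
        <tbody>
          {this.props.folders.map((folder) => <Folder key={folder.id} {...folder} />)}
        </tbody>
      </table>
    );
  }
}

const mapStateToProps = (state) => ({
  folders: selectors.getAllFoldersOfCurrentPage(state)
});

const mapDispatchToProps = {
  fetchFolders: actions.foldersFetchRequest,
  filterFolders: actions.foldersFilterRequest // debounced in sagas.js, suitable for a search input
};

export default connect(mapStateToProps, mapDispatchToProps)(FoldersList);

A filterFolders prop like the one above can be wired directly to a search input's onChange, since FOLDERS_FILTER_REQUEST is debounced by the saga (takeLatest plus delay) before it re-dispatches FOLDERS_FETCH_REQUEST with page 1.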