diff --git a/example/index.html b/example/index.html index ce8f6c4c6b21..40fab389695a 100644 --- a/example/index.html +++ b/example/index.html @@ -63,7 +63,7 @@ var ONE_DAY = 60 * 60 * 24 * 1000; - for (var i=1; i<=10; i++) { + for (var i=1; i<=20; i++) { var date = new Date(); date.setTime(date.getTime() + ONE_DAY); diff --git a/src/actions/entries.js b/src/actions/entries.js index a0aed54947a6..3f5e14490b54 100644 --- a/src/actions/entries.js +++ b/src/actions/entries.js @@ -1,12 +1,15 @@ -import { List } from 'immutable'; +import { fromJS, List, Set } from 'immutable'; import { actions as notifActions } from 'redux-notifications'; import { serializeValues } from 'Lib/serializeEntryValues'; import { currentBackend } from 'Backends/backend'; import { getIntegrationProvider } from 'Integrations'; import { getAsset, selectIntegration } from 'Reducers'; import { selectFields } from 'Reducers/collections'; +import { selectCollectionEntriesCursor } from 'Reducers/cursors'; +import Cursor from 'ValueObjects/Cursor'; import { createEntry } from 'ValueObjects/Entry'; import ValidationErrorTypes from 'Constants/validationErrorTypes'; +import isArray from 'lodash/isArray'; const { notifSend } = notifActions; @@ -80,13 +83,15 @@ export function entriesLoading(collection) { }; } -export function entriesLoaded(collection, entries, pagination) { +export function entriesLoaded(collection, entries, pagination, cursor, append = true) { return { type: ENTRIES_SUCCESS, payload: { collection: collection.get('name'), entries, page: pagination, + cursor: Cursor.create(cursor), + append, }, }; } @@ -238,6 +243,16 @@ export function loadEntry(collection, slug) { }; } +const appendActions = fromJS({ + ["append_next"]: { action: "next", append: true }, +}); + +const addAppendActionsToCursor = cursor => Cursor + .create(cursor) + .updateStore("actions", actions => actions.union( + appendActions.filter(v => actions.has(v.get("action"))).keySeq() + )); + export function 
loadEntries(collection, page = 0) { return (dispatch, getState) => { if (collection.get('isFetching')) { @@ -247,14 +262,86 @@ export function loadEntries(collection, page = 0) { const backend = currentBackend(state.config); const integration = selectIntegration(state, collection.get('name'), 'listEntries'); const provider = integration ? getIntegrationProvider(state.integrations, backend.getToken, integration) : backend; + const append = !!(page && !isNaN(page) && page > 0); dispatch(entriesLoading(collection)); - provider.listEntries(collection, page).then( - response => dispatch(entriesLoaded(collection, response.entries.reverse(), response.pagination)), - error => dispatch(entriesFailed(collection, error)) - ); + provider.listEntries(collection, page) + .then(response => ({ + ...response, + + // The only existing backend using the pagination system is the + // Algolia integration, which is also the only integration used + // to list entries. Thus, this checking for an integration can + // determine whether or not this is using the old integer-based + // pagination API. Other backends will simply store an empty + // cursor, which behaves identically to no cursor at all. + cursor: integration + ? Cursor.create({ actions: ["next"], meta: { usingOldPaginationAPI: true }, data: { nextPage: page + 1 } }) + : Cursor.create(response.cursor), + })) + .then(response => dispatch(entriesLoaded( + collection, + response.cursor.meta.get('usingOldPaginationAPI') + ? 
response.entries.reverse() + : response.entries, + response.pagination, + addAppendActionsToCursor(response.cursor), + append, + ))) + .catch(err => { + dispatch(notifSend({ + message: `Failed to load entries: ${ err }`, + kind: 'danger', + dismissAfter: 8000, + })); + return Promise.reject(dispatch(entriesFailed(collection, err))); + }); }; } +function traverseCursor(backend, cursor, action) { + if (!cursor.actions.has(action)) { + throw new Error(`The current cursor does not support the pagination action "${ action }".`); + } + return backend.traverseCursor(cursor, action); +} + +export function traverseCollectionCursor(collection, action) { + return async (dispatch, getState) => { + const state = getState(); + if (state.entries.getIn(['pages', `${ collection.get('name') }`, 'isFetching',])) { + return; + } + const backend = currentBackend(state.config); + + const { action: realAction, append } = appendActions.has(action) + ? appendActions.get(action).toJS() + : { action, append: false }; + const cursor = selectCollectionEntriesCursor(state.cursors, collection.get('name')); + + // Handle cursors representing pages in the old, integer-based + // pagination API + if (cursor.meta.get("usingOldPaginationAPI", false)) { + return dispatch(loadEntries(collection, cursor.data.get("nextPage"))); + } + + try { + dispatch(entriesLoading(collection)); + const { entries, cursor: newCursor } = await traverseCursor(backend, cursor, realAction); + // Pass null for the old pagination argument - this will + // eventually be removed. 
+ return dispatch(entriesLoaded(collection, entries, null, addAppendActionsToCursor(newCursor), append)); + } catch (err) { + console.error(err); + dispatch(notifSend({ + message: `Failed to persist entry: ${ err }`, + kind: 'danger', + dismissAfter: 8000, + })); + return Promise.reject(dispatch(entriesFailed(collection, err))); + } + } +} + export function createEmptyDraft(collection) { return (dispatch) => { const dataFields = {}; diff --git a/src/actions/search.js b/src/actions/search.js index dfc22b99348c..6f08608014e1 100644 --- a/src/actions/search.js +++ b/src/actions/search.js @@ -105,121 +105,44 @@ export function clearSearch() { // SearchEntries will search for complete entries in all collections. export function searchEntries(searchTerm, page = 0) { return (dispatch, getState) => { + dispatch(searchingEntries(searchTerm)); + const state = getState(); + const backend = currentBackend(state.config); const allCollections = state.collections.keySeq().toArray(); const collections = allCollections.filter(collection => selectIntegration(state, collection, 'search')); const integration = selectIntegration(state, collections[0], 'search'); - if (!integration) { - localSearch(searchTerm, getState, dispatch); - } else { - const provider = getIntegrationProvider(state.integrations, currentBackend(state.config).getToken, integration); - dispatch(searchingEntries(searchTerm)); - provider.search(collections, searchTerm, page).then( - response => dispatch(searchSuccess(searchTerm, response.entries, response.pagination)), - error => dispatch(searchFailure(searchTerm, error)) - ); - } + + const searchPromise = integration + ? 
getIntegrationProvider(state.integrations, backend.getToken, integration).search(collections, searchTerm, page) + : backend.search(state.collections.valueSeq().toArray(), searchTerm); + + return searchPromise.then( + response => dispatch(searchSuccess(searchTerm, response.entries, response.pagination)), + error => dispatch(searchFailure(searchTerm, error)) + ); }; } // Instead of searching for complete entries, query will search for specific fields // in specific collections and return raw data (no entries). -export function query(namespace, collection, searchFields, searchTerm) { +export function query(namespace, collectionName, searchFields, searchTerm) { return (dispatch, getState) => { + dispatch(querying(namespace, collectionName, searchFields, searchTerm)); + const state = getState(); - const integration = selectIntegration(state, collection, 'search'); - dispatch(querying(namespace, collection, searchFields, searchTerm)); - if (!integration) { - localQuery(namespace, collection, searchFields, searchTerm, state, dispatch); - } else { - const provider = getIntegrationProvider(state.integrations, currentBackend(state.config).getToken, integration); - provider.searchBy(searchFields.map(f => `data.${ f }`), collection, searchTerm).then( - response => dispatch(querySuccess(namespace, collection, searchFields, searchTerm, response)), - error => dispatch(queryFailure(namespace, collection, searchFields, searchTerm, error)) - ); - } + const backend = currentBackend(state.config); + const integration = selectIntegration(state, collectionName, 'search'); + const collection = state.collections.find(collection => collection.get('name') === collectionName); + + const queryPromise = integration + ? 
getIntegrationProvider(state.integrations, backend.getToken, integration) + .searchBy(searchFields.map(f => `data.${ f }`), collectionName, searchTerm) + : backend.query(collection, searchFields, searchTerm); + + return queryPromise.then( + response => dispatch(querySuccess(namespace, collectionName, searchFields, searchTerm, response)), + error => dispatch(queryFailure(namespace, collectionName, searchFields, searchTerm, error)) + ); }; } - -// Local Query & Search functions - -function localSearch(searchTerm, getState, dispatch) { - return (function acc(localResults = { entries: [] }) { - function processCollection(collection, collectionKey) { - const state = getState(); - if (state.entries.hasIn(['pages', collectionKey, 'ids'])) { - const searchFields = [ - selectInferedField(collection, 'title'), - selectInferedField(collection, 'shortTitle'), - selectInferedField(collection, 'author'), - ]; - const collectionEntries = selectEntries(state, collectionKey).toJS(); - const filteredEntries = fuzzy.filter(searchTerm, collectionEntries, { - extract: entry => searchFields.reduce((acc, field) => { - const f = entry.data[field]; - return f ? `${ acc } ${ f }` : acc; - }, ""), - }).filter(entry => entry.score > 5); - localResults[collectionKey] = true; - localResults.entries = localResults.entries.concat(filteredEntries); - - const returnedKeys = Object.keys(localResults); - const allCollections = state.collections.keySeq().toArray(); - if (allCollections.every(v => returnedKeys.indexOf(v) !== -1)) { - const sortedResults = localResults.entries.sort((a, b) => { - if (a.score > b.score) return -1; - if (a.score < b.score) return 1; - return 0; - }).map(f => f.original); - if (allCollections.size > 3 || localResults.entries.length > 30) { - console.warn('The Netlify CMS is currently using a Built-in search.' + - '\nWhile this works great for small sites, bigger projects might benefit from a separate search integration.' 
+ - '\nPlease refer to the documentation for more information'); - } - dispatch(searchSuccess(searchTerm, sortedResults, 0)); - } - } else { - // Collection entries aren't loaded yet. - // Dispatch loadEntries and wait before redispatching this action again. - dispatch({ - type: WAIT_UNTIL_ACTION, - predicate: action => (action.type === ENTRIES_SUCCESS && action.payload.collection === collectionKey), - run: () => processCollection(collection, collectionKey), - }); - dispatch(loadEntries(collection)); - } - } - getState().collections.forEach(processCollection); - }()); -} - - -function localQuery(namespace, collection, searchFields, searchTerm, state, dispatch) { - // Check if entries in this collection were already loaded - if (state.entries.hasIn(['pages', collection, 'ids'])) { - const entries = selectEntries(state, collection).toJS(); - const filteredEntries = fuzzy.filter(searchTerm, entries, { - extract: entry => searchFields.reduce((acc, field) => { - const f = entry.data[field]; - return f ? `${ acc } ${ f }` : acc; - }, ""), - }).filter(entry => entry.score > 5); - - const resultObj = { - query: searchTerm, - hits: [], - }; - - resultObj.hits = filteredEntries.map(f => f.original); - dispatch(querySuccess(namespace, collection, searchFields, searchTerm, resultObj)); - } else { - // Collection entries aren't loaded yet. - // Dispatch loadEntries and wait before redispatching this action again. 
- dispatch({ - type: WAIT_UNTIL_ACTION, - predicate: action => (action.type === ENTRIES_SUCCESS && action.payload.collection === collection), - run: dispatch => dispatch(query(namespace, collection, searchFields, searchTerm)), - }); - dispatch(loadEntries(state.collections.get(collection))); - } -} diff --git a/src/backends/backend.js b/src/backends/backend.js index cf1962bcf0a5..db6f53888ee2 100644 --- a/src/backends/backend.js +++ b/src/backends/backend.js @@ -1,5 +1,6 @@ -import { attempt, isError } from 'lodash'; -import { Map } from 'immutable'; +import { attempt, flatten, isError } from 'lodash'; +import { fromJS, Map } from 'immutable'; +import fuzzy from 'fuzzy'; import { resolveFormat } from "Formats/formats"; import { selectIntegration } from 'Reducers/integrations'; import { @@ -10,19 +11,23 @@ import { selectAllowDeletion, selectFolderEntryExtension, selectIdentifier, + selectInferedField, } from "Reducers/collections"; import { createEntry } from "ValueObjects/Entry"; import { sanitizeSlug } from "Lib/urlHelper"; import TestRepoBackend from "./test-repo/implementation"; import GitHubBackend from "./github/implementation"; +import GitLabBackend from "./gitlab/implementation"; import GitGatewayBackend from "./git-gateway/implementation"; import { registerBackend, getBackend } from 'Lib/registry'; +import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from '../valueObjects/Cursor'; /** * Register internal backends */ registerBackend('git-gateway', GitGatewayBackend); registerBackend('github', GitHubBackend); +registerBackend('gitlab', GitLabBackend); registerBackend('test-repo', TestRepoBackend); @@ -107,6 +112,17 @@ const commitMessageFormatter = (type, config, { slug, path, collection }) => { }); } +const extractSearchFields = searchFields => entry => searchFields.reduce((acc, field) => { + const f = entry.data[field]; + return f ? 
`${acc} ${f}` : acc; +}, ""); + +const sortByScore = (a, b) => { + if (a.score > b.score) return -1; + if (a.score < b.score) return 1; + return 0; +}; + class Backend { constructor(implementation, backendName, authStore = null) { this.implementation = implementation; @@ -153,30 +169,112 @@ class Backend { getToken = () => this.implementation.getToken(); + processEntries(loadedEntries, collection) { + const collectionFilter = collection.get('filter'); + const entries = loadedEntries.map(loadedEntry => createEntry( + collection.get("name"), + selectEntrySlug(collection, loadedEntry.file.path), + loadedEntry.file.path, + { raw: loadedEntry.data || '', label: loadedEntry.file.label } + )); + const formattedEntries = entries.map(this.entryWithFormat(collection)); + // If this collection has a "filter" property, filter entries accordingly + const filteredEntries = collectionFilter + ? this.filterEntries({ entries: formattedEntries }, collectionFilter) + : formattedEntries; + return filteredEntries; + } + + listEntries(collection) { const listMethod = this.implementation[selectListMethod(collection)]; const extension = selectFolderEntryExtension(collection); - const collectionFilter = collection.get('filter'); return listMethod.call(this.implementation, collection, extension) - .then(loadedEntries => ( - loadedEntries.map(loadedEntry => createEntry( - collection.get("name"), - selectEntrySlug(collection, loadedEntry.file.path), - loadedEntry.file.path, - { raw: loadedEntry.data || '', label: loadedEntry.file.label } - )) - )) - .then(entries => ( - { - entries: entries.map(this.entryWithFormat(collection)), - } - )) - // If this collection has a "filter" property, filter entries accordingly - .then(loadedCollection => ( - { - entries: collectionFilter ? 
this.filterEntries(loadedCollection, collectionFilter) : loadedCollection.entries - } - )); + .then(loadedEntries => ({ + entries: this.processEntries(loadedEntries, collection), + /* + Wrap cursors so we can tell which collection the cursor is + from. This is done to prevent traverseCursor from requiring a + `collection` argument. + */ + cursor: Cursor.create(loadedEntries[CURSOR_COMPATIBILITY_SYMBOL]).wrapData({ + cursorType: "collectionEntries", + collection, + }), + })); + } + + // The same as listEntries, except that if a cursor with the "next" + // action available is returned, it calls "next" on the cursor and + // repeats the process. Once there is no available "next" action, it + // returns all the collected entries. Used to retrieve all entries + // for local searches and queries. + async listAllEntries(collection) { + if (collection.get("folder") && this.implementation.allEntriesByFolder) { + const extension = selectFolderEntryExtension(collection); + return this.implementation.allEntriesByFolder(collection, extension) + .then(entries => this.processEntries(entries, collection)); + } + + const response = await this.listEntries(collection); + const { entries } = response; + let { cursor } = response; + while (cursor && cursor.actions.includes("next")) { + const { entries: newEntries, cursor: newCursor } = await this.traverseCursor(cursor, "next"); + entries.push(...newEntries); + cursor = newCursor; + } + return entries; + } + + async search(collections, searchTerm) { + // Perform a local search by requesting all entries. For each + // collection, load it, search, and call onCollectionResults with + // its results. 
+ const errors = []; + const collectionEntriesRequests = collections.map(async collection => { + // TODO: pass search fields in as an argument + const searchFields = [ + selectInferedField(collection, 'title'), + selectInferedField(collection, 'shortTitle'), + selectInferedField(collection, 'author'), + ]; + const collectionEntries = await this.listAllEntries(collection); + return fuzzy.filter(searchTerm, collectionEntries, { + extract: extractSearchFields(searchFields), + }); + }).map(p => p.catch(err => errors.push(err) && [])); + + const entries = await Promise.all(collectionEntriesRequests).then(arrs => flatten(arrs)); + + if (errors.length > 0) { + throw new Error({ message: "Errors ocurred while searching entries locally!", errors }); + } + const hits = entries.filter(({ score }) => score > 5).sort(sortByScore).map(f => f.original); + return { entries: hits }; + } + + async query(collection, searchFields, searchTerm) { + const entries = await this.listAllEntries(collection); + const hits = fuzzy.filter(searchTerm, entries, { extract: extractSearchFields(searchFields) }) + .filter(entry => entry.score > 5) + .sort(sortByScore) + .map(f => f.original); + return { query: searchTerm, hits }; + } + + traverseCursor(cursor, action) { + const [data, unwrappedCursor] = cursor.unwrapData(); + // TODO: stop assuming all cursors are for collections + const collection = data.get("collection"); + return this.implementation.traverseCursor(unwrappedCursor, action) + .then(async ({ entries, cursor: newCursor }) => ({ + entries: this.processEntries(entries, collection), + cursor: Cursor.create(newCursor).wrapData({ + cursorType: "collectionEntries", + collection, + }), + })); } getEntry(collection, slug) { diff --git a/src/backends/git-gateway/GitHubAPI.js b/src/backends/git-gateway/GitHubAPI.js new file mode 100644 index 000000000000..332c7ced6bab --- /dev/null +++ b/src/backends/git-gateway/GitHubAPI.js @@ -0,0 +1,106 @@ +import GithubAPI from "Backends/github/API"; +import 
{ APIError } from "ValueObjects/errors"; + +export default class API extends GithubAPI { + constructor(config) { + super(config); + this.api_root = config.api_root; + this.tokenPromise = config.tokenPromise; + this.commitAuthor = config.commitAuthor; + this.repoURL = ""; + } + + hasWriteAccess() { + return this.getBranch() + .then(() => true) + .catch(error => { + if (error.status === 401) { + if (error.message === "Bad credentials") { + throw new APIError("Git Gateway Error: Please ask your site administrator to reissue the Git Gateway token.", error.status, 'Git Gateway'); + } else { + return false; + } + } else if (error.status === 404 && (error.message === undefined || error.message === "Unable to locate site configuration")) { + throw new APIError(`Git Gateway Error: Please make sure Git Gateway is enabled on your site.`, error.status, 'Git Gateway'); + } else { + console.error("Problem fetching repo data from Git Gateway"); + throw error; + } + }); + } + + getRequestHeaders(headers = {}) { + return this.tokenPromise() + .then((jwtToken) => { + const baseHeader = { + "Authorization": `Bearer ${ jwtToken }`, + "Content-Type": "application/json", + ...headers, + }; + + return baseHeader; + }); + } + + + urlFor(path, options) { + const cacheBuster = new Date().getTime(); + const params = [`ts=${ cacheBuster }`]; + if (options.params) { + for (const key in options.params) { + params.push(`${ key }=${ encodeURIComponent(options.params[key]) }`); + } + } + if (params.length) { + path += `?${ params.join("&") }`; + } + return this.api_root + path; + } + + user() { + return Promise.resolve(this.commitAuthor); + } + + request(path, options = {}) { + const url = this.urlFor(path, options); + let responseStatus; + return this.getRequestHeaders(options.headers || {}) + .then(headers => fetch(url, { ...options, headers })) + .then((response) => { + responseStatus = response.status; + const contentType = response.headers.get("Content-Type"); + if (contentType && 
contentType.match(/json/)) { + return this.parseJsonResponse(response); + } + const text = response.text(); + if (!response.ok) { + return Promise.reject(text); + } + return text; + }) + .catch(error => { + throw new APIError((error.message || error.msg), responseStatus, 'Git Gateway'); + }); + } + + commit(message, changeTree) { + const commitParams = { + message, + tree: changeTree.sha, + parents: changeTree.parentSha ? [changeTree.parentSha] : [], + }; + + if (this.commitAuthor) { + commitParams.author = { + ...this.commitAuthor, + date: new Date().toISOString(), + }; + } + + return this.request("/git/commits", { + method: "POST", + body: JSON.stringify(commitParams), + }); + } + +} diff --git a/src/backends/git-gateway/GitLabAPI.js b/src/backends/git-gateway/GitLabAPI.js new file mode 100644 index 000000000000..0d21ca067372 --- /dev/null +++ b/src/backends/git-gateway/GitLabAPI.js @@ -0,0 +1,25 @@ +import { flow } from "lodash"; +import unsentRequest from "Lib/unsentRequest"; +import { then } from "Lib/promiseHelper"; +import GitlabAPI from "Backends/gitlab/API"; + +export default class API extends GitlabAPI { + constructor(config) { + super(config); + this.tokenPromise = config.tokenPromise; + this.commitAuthor = config.commitAuthor; + this.repoURL = ""; + } + + authenticateRequest = async req => unsentRequest.withHeaders({ + Authorization: `Bearer ${ await this.tokenPromise() }`, + }, req); + + request = async req => flow([ + this.buildRequest, + this.authenticateRequest, + then(unsentRequest.performRequest), + ])(req); + + hasWriteAccess = () => Promise.resolve(true) +} diff --git a/src/backends/git-gateway/implementation.js b/src/backends/git-gateway/implementation.js index fe9b8ea0b6b0..9933b50639b8 100644 --- a/src/backends/git-gateway/implementation.js +++ b/src/backends/git-gateway/implementation.js @@ -3,18 +3,20 @@ import jwtDecode from 'jwt-decode'; import {List} from 'immutable'; import { get, pick, intersection } from "lodash"; import GitHubBackend 
from "Backends/github/implementation"; -import API from "./API"; +import GitLabBackend from "Backends/gitlab/implementation"; +import GitHubAPI from "./GitHubAPI"; +import GitLabAPI from "./GitLabAPI"; import AuthenticationPage from "./AuthenticationPage"; const localHosts = { localhost: true, '127.0.0.1': true, - '0.0.0.0': true -} + '0.0.0.0': true, +}; const defaults = { identity: '/.netlify/identity', - gateway: '/.netlify/git/github' -} + gateway: '/.netlify/git', +}; function getEndpoint(endpoint, netlifySiteURL) { if (localHosts[document.location.host.split(":").shift()] && netlifySiteURL && endpoint.match(/^\/\.netlify\//)) { @@ -29,65 +31,88 @@ function getEndpoint(endpoint, netlifySiteURL) { return endpoint; } -export default class GitGateway extends GitHubBackend { +export default class GitGateway { constructor(config) { - super(config, true); - - this.accept_roles = (config.getIn(["backend", "accept_roles"]) || List()).toArray(); + this.config = config; + this.branch = config.getIn(["backend", "branch"], "master").trim(); + this.squash_merges = config.getIn(["backend", "squash_merges"]); const netlifySiteURL = localStorage.getItem("netlifySiteURL"); const APIUrl = getEndpoint(config.getIn(["backend", "identity_url"], defaults.identity), netlifySiteURL); - this.github_proxy_url = getEndpoint(config.getIn(["backend", "gateway_url"], defaults.gateway), netlifySiteURL); - this.authClient = window.netlifyIdentity ? window.netlifyIdentity.gotrue : new GoTrue({APIUrl}); + this.gatewayUrl = getEndpoint(config.getIn(["backend", "gateway_url"], defaults.gateway), netlifySiteURL); + + const backendTypeRegex = /\/(github|gitlab)\/?$/; + const backendTypeMatches = this.gatewayUrl.match(backendTypeRegex); + if (backendTypeMatches) { + this.backendType = backendTypeMatches[1]; + this.gatewayUrl = this.gatewayUrl.replace(backendTypeRegex, "/"); + } else { + this.backendType = null; + } + this.authClient = window.netlifyIdentity ? 
window.netlifyIdentity.gotrue : new GoTrue({ APIUrl }); AuthenticationPage.authClient = this.authClient; - } - restoreUser() { - const user = this.authClient && this.authClient.currentUser(); - if (!user) return Promise.reject(); - return this.authenticate(user); + this.backend = null; } - authenticate(user) { this.tokenPromise = user.jwt.bind(user); - return this.tokenPromise() - .then((token) => { - let validRole = true; - if (this.accept_roles && this.accept_roles.length > 0) { + return this.tokenPromise().then(async token => { + if (!this.backendType) { + const { github_enabled, gitlab_enabled, roles } = await fetch(`${ this.gatewayUrl }/settings`, { + headers: { Authorization: `Bearer ${ token }` }, + }).then(res => res.json()); + this.acceptRoles = roles; + if (github_enabled) { + this.backendType = "github"; + } else if (gitlab_enabled) { + this.backendType = "gitlab"; + } + } + + if (this.acceptRoles && this.acceptRoles.length > 0) { const userRoles = get(jwtDecode(token), 'app_metadata.roles', []); - validRole = intersection(userRoles, this.accept_roles).length > 0; + const validRole = intersection(userRoles, this.acceptRoles).length > 0; + if (!validRole) { + throw new Error("You don't have sufficient permissions to access Netlify CMS"); + } + } + + const userData = { + name: user.user_metadata.name || user.email.split('@').shift(), + email: user.email, + avatar_url: user.user_metadata.avatar_url, + metadata: user.user_metadata, + }; + const apiConfig = { + api_root: `${ this.gatewayUrl }/${ this.backendType }`, + branch: this.branch, + tokenPromise: this.tokenPromise, + commitAuthor: pick(userData, ["name", "email"]), + squash_merges: this.squash_merges, + }; + + if (this.backendType === "github") { + this.api = new GitHubAPI(apiConfig); + this.backend = new GitHubBackend(this.config, { proxied: true, API: this.api }); + } else if (this.backendType === "gitlab") { + this.api = new GitLabAPI(apiConfig); + this.backend = new GitLabBackend(this.config, { 
proxied: true, API: this.api }); } - if (validRole) { - const userData = { - name: user.user_metadata.name || user.email.split('@').shift(), - email: user.email, - avatar_url: user.user_metadata.avatar_url, - metadata: user.user_metadata, - }; - this.api = new API({ - api_root: this.github_proxy_url, - branch: this.branch, - tokenPromise: this.tokenPromise, - commitAuthor: pick(userData, ["name", "email"]), - squash_merges: this.squash_merges, - }); - return userData; - } else { + + if (!(await this.api.hasWriteAccess())) { throw new Error("You don't have sufficient permissions to access Netlify CMS"); } - }) - .then(userData => - this.api.hasWriteAccess().then(canWrite => { - if (canWrite) { - return userData; - } else { - throw new Error("You don't have sufficient permissions to access Netlify CMS"); - } - }) - ); + }); + } + restoreUser() { + const user = this.authClient && this.authClient.currentUser(); + if (!user) return Promise.reject(); + return this.authenticate(user); + } + authComponent() { + return AuthenticationPage; } - logout() { if (window.netlifyIdentity) { return window.netlifyIdentity.logout(); @@ -95,13 +120,22 @@ export default class GitGateway extends GitHubBackend { const user = this.authClient.currentUser(); return user && user.logout(); } - getToken() { return this.tokenPromise(); } - authComponent() { - return AuthenticationPage; - } - + entriesByFolder(collection, extension) { return this.backend.entriesByFolder(collection, extension); } + entriesByFiles(collection) { return this.backend.entriesByFiles(collection); } + fetchFiles(files) { return this.backend.fetchFiles(files); } + getEntry(collection, slug, path) { return this.backend.getEntry(collection, slug, path); } + getMedia() { return this.backend.getMedia(); } + persistEntry(entry, mediaFiles, options) { return this.backend.persistEntry(entry, mediaFiles, options); } + persistMedia(mediaFile, options) { return this.backend.persistMedia(mediaFile, options); } + deleteFile(path, 
commitMessage, options) { return this.backend.deleteFile(path, commitMessage, options); } + unpublishedEntries() { return this.backend.unpublishedEntries(); } + unpublishedEntry(collection, slug) { return this.backend.unpublishedEntry(collection, slug); } + updateUnpublishedEntryStatus(collection, slug, newStatus) { return this.backend.updateUnpublishedEntryStatus(collection, slug, newStatus); } + deleteUnpublishedEntry(collection, slug) { return this.backend.deleteUnpublishedEntry(collection, slug); } + publishUnpublishedEntry(collection, slug) { return this.backend.publishUnpublishedEntry(collection, slug); } + traverseCursor(cursor, action) { return this.backend.traverseCursor(cursor, action); } } diff --git a/src/backends/github/implementation.js b/src/backends/github/implementation.js index 2fa63f4b72aa..45653c0fe699 100644 --- a/src/backends/github/implementation.js +++ b/src/backends/github/implementation.js @@ -6,13 +6,20 @@ import API from "./API"; const MAX_CONCURRENT_DOWNLOADS = 10; export default class GitHub { - constructor(config, proxied = false) { + constructor(config, options={}) { this.config = config; + this.options = { + proxied: false, + API: null, + ...options, + }; - if (!proxied && config.getIn(["backend", "repo"]) == null) { + if (!this.options.proxied && config.getIn(["backend", "repo"]) == null) { throw new Error("The GitHub backend needs a \"repo\" in the backend configuration."); } + this.api = this.options.API || null; + this.repo = config.getIn(["backend", "repo"], ""); this.branch = config.getIn(["backend", "branch"], "master").trim(); this.api_root = config.getIn(["backend", "api_root"], "https://api.github.com"); diff --git a/src/backends/gitlab/API.js b/src/backends/gitlab/API.js new file mode 100644 index 000000000000..7cbdff7797fe --- /dev/null +++ b/src/backends/gitlab/API.js @@ -0,0 +1,224 @@ +import LocalForage from "Lib/LocalForage"; +import { Base64 } from "js-base64"; +import { fromJS, List, Map } from "immutable"; +import 
{ cond, flow, isString, partial, partialRight, pick, omit, set, update } from "lodash"; +import unsentRequest from "Lib/unsentRequest"; +import { then } from "Lib/promiseHelper"; +import AssetProxy from "ValueObjects/AssetProxy"; +import { APIError } from "ValueObjects/errors"; +import Cursor from "ValueObjects/Cursor" + +export default class API { + constructor(config) { + this.api_root = config.api_root || "https://gitlab.com/api/v4"; + this.token = config.token || false; + this.branch = config.branch || "master"; + this.repo = config.repo || ""; + this.repoURL = `/projects/${ encodeURIComponent(this.repo) }`; + } + + withAuthorizationHeaders = req => + unsentRequest.withHeaders(this.token ? { Authorization: `Bearer ${ this.token }` } : {}, req); + + buildRequest = req => flow([ + unsentRequest.withRoot(this.api_root), + this.withAuthorizationHeaders, + unsentRequest.withTimestamp, + ])(req); + + request = async req => flow([ + this.buildRequest, + unsentRequest.performRequest, + p => p.catch(err => Promise.reject(new APIError(err.message, null, "GitLab"))), + ])(req); + + parseResponse = async (res, { expectingOk=true, expectingFormat=false }) => { + const contentType = res.headers.get("Content-Type"); + const isJSON = contentType === "application/json"; + let body; + try { + body = await ((expectingFormat === "json" || isJSON) ? res.json() : res.text()); + } catch (err) { + throw new APIError(err.message, res.status, "GitLab"); + } + if (expectingOk && !res.ok) { + throw new APIError((isJSON && body.message) ? 
body.message : body, res.status, "GitLab"); + } + return body; + }; + + responseToJSON = res => this.parseResponse(res, { expectingFormat: "json" }); + responseToText = res => this.parseResponse(res, { expectingFormat: "text" }); + requestJSON = req => this.request(req).then(this.responseToJSON); + requestText = req => this.request(req).then(this.responseToText); + + user = () => this.requestJSON("/user"); + + WRITE_ACCESS = 30; + hasWriteAccess = user => this.requestJSON(this.repoURL).then(({ permissions }) => { + const { project_access, group_access } = permissions; + if (project_access && (project_access.access_level >= this.WRITE_ACCESS)) { + return true; + } + if (group_access && (group_access.access_level >= this.WRITE_ACCESS)) { + return true; + } + return false; + }); + + readFile = async (path, sha, ref=this.branch) => { + const cachedFile = sha ? await LocalForage.getItem(`gl.${ sha }`) : null; + if (cachedFile) { return cachedFile; } + const result = await this.requestText({ + url: `${ this.repoURL }/repository/files/${ encodeURIComponent(path) }/raw`, + params: { ref }, + cache: "no-store", + }); + if (sha) { LocalForage.setItem(`gl.${ sha }`, result) } + return result; + }; + + fileDownloadURL = (path, ref=this.branch) => unsentRequest.toURL(this.buildRequest({ + url: `${ this.repoURL }/repository/files/${ encodeURIComponent(path) }/raw`, + params: { ref }, + })); + + getCursorFromHeaders = headers => { + // indices and page counts are assumed to be zero-based, but the + // indices and page counts returned from GitLab are one-based + const index = parseInt(headers.get("X-Page"), 10) - 1; + const pageCount = parseInt(headers.get("X-Total-Pages"), 10) - 1; + const pageSize = parseInt(headers.get("X-Per-Page"), 10); + const count = parseInt(headers.get("X-Total"), 10); + const linksRaw = headers.get("Link"); + const links = List(linksRaw.split(",")) + .map(str => str.trim().split(";")) + .map(([linkStr, keyStr]) => [ + keyStr.match(/rel="(.*?)"/)[1], + 
unsentRequest.fromURL(linkStr.trim().match(/<(.*?)>/)[1]), + ]) + .update(list => Map(list)); + const actions = links.keySeq().flatMap(key => ( + (key === "prev" && index > 0) || + (key === "next" && index < pageCount) || + (key === "first" && index > 0) || + (key === "last" && index < pageCount) + ) ? [key] : []); + return Cursor.create({ + actions, + meta: { index, count, pageSize, pageCount }, + data: { links }, + }); + }; + + getCursor = ({ headers }) => this.getCursorFromHeaders(headers); + + // Gets a cursor without retrieving the entries by using a HEAD + // request + fetchCursor = req => flow([unsentRequest.withMethod("HEAD"), this.request, then(this.getCursor)])(req); + fetchCursorAndEntries = req => flow([ + unsentRequest.withMethod("GET"), + this.request, + p => Promise.all([p.then(this.getCursor), p.then(this.responseToJSON)]), + then(([cursor, entries]) => ({ cursor, entries })), + ])(req); + fetchRelativeCursor = async (cursor, action) => this.fetchCursor(cursor.data.links[action]); + + reversableActions = Map({ + first: "last", + last: "first", + next: "prev", + prev: "next", + }); + + reverseCursor = cursor => { + const pageCount = cursor.meta.get("pageCount", 0); + const currentIndex = cursor.meta.get("index", 0); + const newIndex = pageCount - currentIndex; + + const links = cursor.data.get("links", Map()); + const reversedLinks = links.mapEntries(([k, v]) => [this.reversableActions.get(k) || k, v]); + + const reversedActions = cursor.actions.map(action => this.reversableActions.get(action) || action); + + return cursor.updateStore(store => store + .setIn(["meta", "index"], newIndex) + .setIn(["data", "links"], reversedLinks) + .set("actions", reversedActions)); + }; + + // The exported listFiles and traverseCursor reverse the direction + // of the cursors, since GitLab's pagination sorts the opposite way + // we want to sort by default (it sorts by filename _descending_, + // while the CMS defaults to sorting by filename _ascending_, at + // 
least in the current GitHub backend). This should eventually be + // refactored. + listFiles = async path => { + const firstPageCursor = await this.fetchCursor({ + url: `${ this.repoURL }/repository/tree`, + params: { path, ref: this.branch }, + }); + const lastPageLink = firstPageCursor.data.getIn(["links", "last"]); + const { entries, cursor } = await this.fetchCursorAndEntries(lastPageLink); + return { files: entries.filter(({ type }) => type === "blob").reverse(), cursor: this.reverseCursor(cursor) }; + }; + + traverseCursor = async (cursor, action) => { + const link = cursor.data.getIn(["links", action]); + const { entries, cursor: newCursor } = await this.fetchCursorAndEntries(link); + return { entries: entries.reverse(), cursor: this.reverseCursor(newCursor) }; + }; + + listAllFiles = async path => { + const entries = []; + let { cursor, entries: initialEntries } = await this.fetchCursorAndEntries({ + url: `${ this.repoURL }/repository/tree`, + // Get the maximum number of entries per page + params: { path, ref: this.branch, per_page: 100 }, + }); + entries.push(...initialEntries); + while (cursor && cursor.actions.has("next")) { + const link = cursor.data.getIn(["links", "next"]); + const { cursor: newCursor, entries: newEntries } = await this.fetchCursorAndEntries(link); + entries.push(...newEntries); + cursor = newCursor; + } + return entries.filter(({ type }) => type === "blob"); + }; + + toBase64 = str => Promise.resolve(Base64.encode(str)); + fromBase64 = str => Base64.decode(str); + uploadAndCommit = async (item, { commitMessage, updateFile = false, branch = this.branch, author = this.commitAuthor }) => { + const content = await (item instanceof AssetProxy ? item.toBase64() : this.toBase64(item.raw)); + const file_path = item.path.replace(/^\//, ""); + const action = (updateFile ? 
"update" : "create"); + const encoding = "base64"; + const { name: author_name, email: author_email } = pick(author || {}, ["name", "email"]); + const body = JSON.stringify({ + branch, + commit_message: commitMessage, + actions: [{ action, file_path, content, encoding }], + }); + + await this.request({ + url: `${ this.repoURL }/repository/commits`, + method: "POST", + headers: { "Content-Type": "application/json" }, + body, + }); + + return { ...item, uploaded: true }; + }; + + persistFiles = (files, { commitMessage, newEntry }) => + Promise.all(files.map(file => this.uploadAndCommit(file, { commitMessage, updateFile: newEntry === false }))); + + deleteFile = (path, commit_message, options = {}) => { + const branch = options.branch || this.branch; + return flow([ + unsentRequest.withMethod("DELETE"), + unsentRequest.withParams({ commit_message, branch }), + this.request, + ])(`${ this.repoURL }/repository/files/${ encodeURIComponent(path) }`); + }; +} diff --git a/src/backends/gitlab/AuthenticationPage.js b/src/backends/gitlab/AuthenticationPage.js new file mode 100644 index 000000000000..7769911e5fb6 --- /dev/null +++ b/src/backends/gitlab/AuthenticationPage.js @@ -0,0 +1,69 @@ +import PropTypes from 'prop-types'; +import React from 'react'; +import NetlifyAuthenticator from 'Lib/netlify-auth'; +import ImplicitAuthenticator from 'Lib/implicit-oauth'; +import { Icon } from 'UI'; + +export default class AuthenticationPage extends React.Component { + static propTypes = { + onLogin: PropTypes.func.isRequired, + inProgress: PropTypes.bool, + }; + + state = {}; + + componentDidMount() { + const authType = this.props.config.getIn(['backend', 'auth_type']); + if (authType === "implicit") { + this.auth = new ImplicitAuthenticator({ + base_url: this.props.config.getIn(['backend', 'base_url'], "https://gitlab.com"), + auth_endpoint: this.props.config.getIn(['backend', 'auth_endpoint'], 'oauth/authorize'), + appID: this.props.config.getIn(['backend', 'app_id']), + }); + // 
Complete implicit authentication if we were redirected back to from the provider. + this.auth.completeAuth((err, data) => { + if (err) { + this.setState({ loginError: err.toString() }); + return; + } + this.props.onLogin(data); + }); + } else { + this.auth = new NetlifyAuthenticator({ + base_url: this.props.base_url, + site_id: (document.location.host.split(':')[0] === 'localhost') ? 'cms.netlify.com' : this.props.siteId, + auth_endpoint: this.props.authEndpoint, + }); + } + } + + handleLogin = (e) => { + e.preventDefault(); + this.auth.authenticate({ provider: 'gitlab', scope: 'api' }, (err, data) => { + if (err) { + this.setState({ loginError: err.toString() }); + return; + } + this.props.onLogin(data); + }); + }; + + render() { + const { loginError } = this.state; + const { inProgress } = this.props; + + return ( +
+ + {loginError &&

{loginError}

} + +
+ ); + } +} diff --git a/src/backends/gitlab/implementation.js b/src/backends/gitlab/implementation.js new file mode 100644 index 000000000000..cb9222e8166a --- /dev/null +++ b/src/backends/gitlab/implementation.js @@ -0,0 +1,155 @@ +import trimStart from 'lodash/trimStart'; +import semaphore from "semaphore"; +import AuthenticationPage from "./AuthenticationPage"; +import API from "./API"; +import { fileExtension } from 'Lib/pathHelper'; +import { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor'; +import { EDITORIAL_WORKFLOW } from "Constants/publishModes"; + +const MAX_CONCURRENT_DOWNLOADS = 10; + +export default class GitLab { + constructor(config, options={}) { + this.config = config; + this.options = { + proxied: false, + API: null, + ...options, + }; + + if (config.getIn(["publish_mode"]) === EDITORIAL_WORKFLOW) { + throw new Error("The GitLab backend does not support the Editorial Workflow.") + } + + if (!this.options.proxied && config.getIn(["backend", "repo"]) == null) { + throw new Error("The GitLab backend needs a \"repo\" in the backend configuration."); + } + + this.api = this.options.API || null; + + this.repo = config.getIn(["backend", "repo"], ""); + this.branch = config.getIn(["backend", "branch"], "master"); + this.api_root = config.getIn(["backend", "api_root"], "https://gitlab.com/api/v4"); + this.token = ''; + } + + authComponent() { + return AuthenticationPage; + } + + restoreUser(user) { + return this.authenticate(user); + } + + authenticate(state) { + this.token = state.token; + this.api = new API({ token: this.token, branch: this.branch, repo: this.repo, api_root: this.api_root }); + return this.api.user().then(user => + this.api.hasWriteAccess(user).then((isCollab) => { + // Unauthorized user + if (!isCollab) throw new Error("Your GitLab user account does not have access to this repo."); + // Authorized user + return Object.assign({}, user, { token: state.token }); + }) + ); + } + + logout() { + this.token = null; + return; + } + + 
getToken() { + return Promise.resolve(this.token); + } + + entriesByFolder(collection, extension) { + return this.api.listFiles(collection.get("folder")) + .then(({ files, cursor }) => + this.fetchFiles(files.filter(file => fileExtension(file.name) === extension)) + .then(fetchedFiles => { + const returnedFiles = fetchedFiles; + returnedFiles[CURSOR_COMPATIBILITY_SYMBOL] = cursor; + return returnedFiles; + }) + ); + } + + allEntriesByFolder(collection, extension) { + return this.api.listAllFiles(collection.get("folder")) + .then(files => this.fetchFiles(files.filter(file => fileExtension(file.name) === extension))); + } + + entriesByFiles(collection) { + const files = collection.get("files").map(collectionFile => ({ + path: collectionFile.get("file"), + label: collectionFile.get("label"), + })); + return this.fetchFiles(files).then(fetchedFiles => { + const returnedFiles = fetchedFiles; + return returnedFiles; + }); + } + + fetchFiles = (files) => { + const sem = semaphore(MAX_CONCURRENT_DOWNLOADS); + const promises = []; + files.forEach((file) => { + promises.push(new Promise((resolve, reject) => ( + sem.take(() => this.api.readFile(file.path, file.id).then((data) => { + resolve({ file, data }); + sem.leave(); + }).catch((error = true) => { + sem.leave(); + console.error(`failed to load file from GitLab: ${ file.path }`); + resolve({ error }); + })) + ))); + }); + return Promise.all(promises) + .then(loadedEntries => loadedEntries.filter(loadedEntry => !loadedEntry.error)); + }; + + // Fetches a single entry. + getEntry(collection, slug, path) { + return this.api.readFile(path).then(data => ({ + file: { path }, + data, + })); + } + + getMedia() { + return this.api.listAllFiles(this.config.get('media_folder')) + .then(files => files.map(({ id, name, path }) => { + const url = new URL(this.api.fileDownloadURL(path)); + if (url.pathname.match(/.svg$/)) { + url.search += (url.search.slice(1) === '' ? '?' 
: '&') + 'sanitize=true'; + } + return { id, name, url: url.href, path }; + })); + } + + + async persistEntry(entry, mediaFiles, options = {}) { + return this.api.persistFiles([entry], options); + } + + async persistMedia(mediaFile, options = {}) { + await this.api.persistFiles([mediaFile], options); + const { value, path, fileObj } = mediaFile; + const url = this.api.fileDownloadURL(path); + return { name: value, size: fileObj.size, url, path: trimStart(path, '/') }; + } + + deleteFile(path, commitMessage, options) { + return this.api.deleteFile(path, commitMessage, options); + } + + traverseCursor(cursor, action) { + return this.api.traverseCursor(cursor, action) + .then(async ({ entries, cursor: newCursor }) => ({ + entries: await Promise.all(entries.map(file => this.api.readFile(file.path, file.id).then(data => ({ file, data })))), + cursor: newCursor, + })); + } +} diff --git a/src/backends/test-repo/implementation.js b/src/backends/test-repo/implementation.js index e0f1e0d74d4e..945296b921fa 100644 --- a/src/backends/test-repo/implementation.js +++ b/src/backends/test-repo/implementation.js @@ -1,7 +1,9 @@ -import { remove, attempt, isError } from 'lodash'; +import { remove, attempt, isError, take } from 'lodash'; import uuid from 'uuid/v4'; +import { fromJS } from 'immutable'; import { EDITORIAL_WORKFLOW, status } from 'Constants/publishModes'; import { EditorialWorkflowError } from 'ValueObjects/errors'; +import Cursor, { CURSOR_COMPATIBILITY_SYMBOL } from 'ValueObjects/Cursor' import AuthenticationPage from './AuthenticationPage'; window.repoFiles = window.repoFiles || {}; @@ -16,6 +18,31 @@ function getFile(path) { return obj || {}; } +const pageSize = 10; + +const getCursor = (collection, extension, entries, index) => { + const count = entries.length; + const pageCount = Math.floor(count / pageSize); + return Cursor.create({ + actions: [ + ...(index < pageCount ? ["next", "last"] : []), + ...(index > 0 ? 
["prev", "first"] : []), + ], + meta: { index, count, pageSize, pageCount }, + data: { collection, extension, index, pageCount }, + }); +}; + +const getFolderEntries = (folder, extension) => { + return Object.keys(window.repoFiles[folder]) + .filter(path => path.endsWith(`.${ extension }`)) + .map(path => ({ + file: { path: `${ folder }/${ path }` }, + data: window.repoFiles[folder][path].content, + })) + .reverse(); +}; + export default class TestRepo { constructor(config) { this.config = config; @@ -42,25 +69,28 @@ export default class TestRepo { return Promise.resolve(''); } + traverseCursor(cursor, action) { + const { collection, extension, index, pageCount } = cursor.data.toObject(); + const newIndex = (() => { + if (action === "next") { return index + 1; } + if (action === "prev") { return index - 1; } + if (action === "first") { return 0; } + if (action === "last") { return pageCount; } + })(); + // TODO: stop assuming cursors are for collections + const allEntries = getFolderEntries(collection.get('folder'), extension); + const entries = allEntries.slice(newIndex * pageSize, (newIndex * pageSize) + pageSize); + const newCursor = getCursor(collection, extension, allEntries, newIndex); + return Promise.resolve({ entries, cursor: newCursor }); + } + entriesByFolder(collection, extension) { - const entries = []; const folder = collection.get('folder'); - if (folder) { - for (const path in window.repoFiles[folder]) { - if (!path.endsWith('.' + extension)) { - continue; - } - - const file = { path: `${ folder }/${ path }` }; - entries.push( - { - file, - data: window.repoFiles[folder][path].content, - } - ); - } - } - return Promise.resolve(entries); + const entries = folder ? 
getFolderEntries(folder, extension) : []; + const cursor = getCursor(collection, extension, entries, 0); + const ret = take(entries, pageSize); + ret[CURSOR_COMPATIBILITY_SYMBOL] = cursor; + return Promise.resolve(ret); } entriesByFiles(collection) { @@ -101,7 +131,7 @@ export default class TestRepo { e.metaData.collection === collection && e.slug === slug )); unpubStore.splice(existingEntryIndex, 1); - return Promise.resolve() + return Promise.resolve(); } persistEntry({ path, raw, slug }, mediaFiles = [], options = {}) { diff --git a/src/components/Collection/Entries/Entries.js b/src/components/Collection/Entries/Entries.js index bc9974b2e33d..682d3956b134 100644 --- a/src/components/Collection/Entries/Entries.js +++ b/src/components/Collection/Entries/Entries.js @@ -11,7 +11,9 @@ const Entries = ({ page, onPaginate, isFetching, - viewStyle + viewStyle, + cursor, + handleCursorActions, }) => { const loadingMessages = [ 'Loading Entries', @@ -25,9 +27,9 @@ const Entries = ({ collections={collections} entries={entries} publicFolder={publicFolder} - page={page} - onPaginate={onPaginate} viewStyle={viewStyle} + cursor={cursor} + handleCursorActions={handleCursorActions} /> ); } @@ -46,6 +48,8 @@ Entries.propTypes = { page: PropTypes.number, isFetching: PropTypes.bool, viewStyle: PropTypes.string, + cursor: PropTypes.any.isRequired, + handleCursorActions: PropTypes.func.isRequired, }; export default Entries; diff --git a/src/components/Collection/Entries/EntriesCollection.js b/src/components/Collection/Entries/EntriesCollection.js index cf0c410bd027..da88a752dfcb 100644 --- a/src/components/Collection/Entries/EntriesCollection.js +++ b/src/components/Collection/Entries/EntriesCollection.js @@ -2,18 +2,26 @@ import React from 'react'; import PropTypes from 'prop-types'; import ImmutablePropTypes from 'react-immutable-proptypes'; import { connect } from 'react-redux'; -import { loadEntries as actionLoadEntries } from 'Actions/entries'; +import { partial } from 'lodash'; 
+import { + loadEntries as actionLoadEntries, + traverseCollectionCursor as actionTraverseCollectionCursor, +} from 'Actions/entries'; import { selectEntries } from 'Reducers'; +import { selectCollectionEntriesCursor } from 'Reducers/cursors'; +import Cursor from 'ValueObjects/Cursor'; import Entries from './Entries'; class EntriesCollection extends React.Component { static propTypes = { collection: ImmutablePropTypes.map.isRequired, publicFolder: PropTypes.string.isRequired, - page: PropTypes.number, entries: ImmutablePropTypes.list, isFetching: PropTypes.bool.isRequired, viewStyle: PropTypes.string, + cursor: PropTypes.object.isRequired, + loadEntries: PropTypes.func.isRequired, + traverseCollectionCursor: PropTypes.func.isRequired, }; componentDidMount() { @@ -30,31 +38,31 @@ class EntriesCollection extends React.Component { } } - handleLoadMore = page => { - const { collection, loadEntries } = this.props; - loadEntries(collection, page); - } + handleCursorActions = (cursor, action) => { + const { collection, traverseCollectionCursor } = this.props; + traverseCollectionCursor(collection, action); + }; render () { - const { collection, entries, publicFolder, page, isFetching, viewStyle } = this.props; + const { collection, entries, publicFolder, isFetching, viewStyle, cursor } = this.props; return ( ); } } function mapStateToProps(state, ownProps) { - const { name, collection, viewStyle } = ownProps; + const { collection, viewStyle } = ownProps; const { config } = state; const publicFolder = config.get('public_folder'); const page = state.entries.getIn(['pages', collection.get('name'), 'page']); @@ -62,11 +70,15 @@ function mapStateToProps(state, ownProps) { const entries = selectEntries(state, collection.get('name')); const isFetching = state.entries.getIn(['pages', collection.get('name'), 'isFetching'], false); - return { publicFolder, collection, page, entries, isFetching, viewStyle }; + const rawCursor = selectCollectionEntriesCursor(state.cursors, 
collection.get("name")); + const cursor = Cursor.create(rawCursor).clearData(); + + return { publicFolder, collection, page, entries, isFetching, viewStyle, cursor }; } const mapDispatchToProps = { loadEntries: actionLoadEntries, + traverseCollectionCursor: actionTraverseCollectionCursor, }; export default connect(mapStateToProps, mapDispatchToProps)(EntriesCollection); diff --git a/src/components/Collection/Entries/EntriesSearch.js b/src/components/Collection/Entries/EntriesSearch.js index daf58d006255..97d1d5990191 100644 --- a/src/components/Collection/Entries/EntriesSearch.js +++ b/src/components/Collection/Entries/EntriesSearch.js @@ -7,6 +7,7 @@ import { searchEntries as actionSearchEntries, clearSearch as actionClearSearch } from 'Actions/search'; +import Cursor from 'ValueObjects/Cursor'; import Entries from './Entries'; class EntriesSearch extends React.Component { @@ -36,15 +37,27 @@ class EntriesSearch extends React.Component { this.props.clearSearch(); } - handleLoadMore = (page) => { - const { searchTerm, searchEntries } = this.props; - if (!isNaN(page)) searchEntries(searchTerm, page); + getCursor = () => { + const { page } = this.props; + return Cursor.create({ + actions: isNaN(page) ? 
[] : ["append_next"], + }); + }; + + handleCursorActions = (action) => { + const { page, searchTerm, searchEntries } = this.props; + if (action === "append_next") { + const nextPage = page + 1; + searchEntries(searchTerm, nextPage); + } }; render () { const { collections, entries, publicFolder, page, isFetching } = this.props; return ( { - this.props.onPaginate(this.props.page + 1); + const { cursor, handleCursorActions } = this.props; + if (Cursor.create(cursor).actions.has("append_next")) { + handleCursorActions("append_next"); + } }; inferFields = collection => { @@ -48,12 +50,12 @@ export default class EntryListing extends React.Component { const collectionLabel = collection.get('label'); const inferedFields = this.inferFields(collection); const entryCardProps = { collection, entry, inferedFields, publicFolder, key: idx, collectionLabel }; - return ; + return ; }); }; render() { - const { collections, entries, publicFolder } = this.props; + const { collections } = this.props; return (
diff --git a/src/components/UI/ErrorBoundary/ErrorBoundary.js b/src/components/UI/ErrorBoundary/ErrorBoundary.js index d0d0e364d930..a8447e6d99a1 100644 --- a/src/components/UI/ErrorBoundary/ErrorBoundary.js +++ b/src/components/UI/ErrorBoundary/ErrorBoundary.js @@ -1,35 +1,36 @@ import PropTypes from 'prop-types'; import React from 'react'; -const ErrorComponent = () => { - const issueUrl = "https://github.com/netlify/netlify-cms/issues/new"; - return ( -
-

Sorry!

-

- There's been an error - please - report it! -

-
- ); +const DefaultErrorComponent = () => { }; -export class ErrorBoundary extends React.Component { - static propTypes = { - render: PropTypes.element, - }; +const ISSUE_URL = "https://github.com/netlify/netlify-cms/issues/new"; +export class ErrorBoundary extends React.Component { state = { hasError: false, + errorMessage: '', }; componentDidCatch(error) { console.error(error); - this.setState({ hasError: true }); + this.setState({ hasError: true, errorMessage: error.toString() }); } render() { - const errorComponent = this.props.errorComponent || ; - return this.state.hasError ? errorComponent : this.props.children; + const { hasError, errorMessage } = this.state; + if (!hasError) { + return this.props.children; + } + return ( +
+

Sorry!

+

+ There's been an error - please + report it! +

+

{errorMessage}

+
+ ); } } diff --git a/src/components/UI/Icon/images/_index.js b/src/components/UI/Icon/images/_index.js index f974d8736b06..352280f40bbe 100644 --- a/src/components/UI/Icon/images/_index.js +++ b/src/components/UI/Icon/images/_index.js @@ -13,6 +13,7 @@ import iconDragHandle from './drag-handle.svg'; import iconEye from './eye.svg'; import iconFolder from './folder.svg'; import iconGithub from './github.svg'; +import iconGitlab from './gitlab.svg'; import iconGrid from './grid.svg'; import iconH1 from './h1.svg'; import iconH2 from './h2.svg'; @@ -55,6 +56,7 @@ const images = { 'eye': iconEye, 'folder': iconFolder, 'github': iconGithub, + 'gitlab': iconGitlab, 'grid': iconGrid, 'h1': iconH1, 'h2': iconH2, diff --git a/src/components/UI/Icon/images/gitlab.svg b/src/components/UI/Icon/images/gitlab.svg new file mode 100644 index 000000000000..9d3134afcaf3 --- /dev/null +++ b/src/components/UI/Icon/images/gitlab.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/lib/implicit-oauth.js b/src/lib/implicit-oauth.js new file mode 100644 index 000000000000..cf5f2a0a3c4f --- /dev/null +++ b/src/lib/implicit-oauth.js @@ -0,0 +1,74 @@ +import { Map } from 'immutable'; +import { trim, trimEnd } from 'lodash'; +import { randomStr } from 'Lib/randomGenerator'; +import history from 'Routing/history'; + +function createNonce() { + const nonce = randomStr(); + window.sessionStorage.setItem("netlify-cms-auth", JSON.stringify({ nonce })); + return nonce; +} + +function validateNonce(check) { + const auth = window.sessionStorage.getItem("netlify-cms-auth"); + const valid = auth && JSON.parse(auth).nonce; + window.localStorage.removeItem("netlify-cms-auth"); + return (check === valid); +} + +export default class ImplicitAuthenticator { + constructor(config = {}) { + const baseURL = trimEnd(config.base_url, '/'); + const authEndpoint = trim(config.auth_endpoint, '/'); + this.auth_url = `${ baseURL }/${ authEndpoint }`; + this.appID = config.app_id; + } + + 
authenticate(options, cb) { + if ( + document.location.protocol !== "https:" + // TODO: Is insecure localhost a bad idea as well? I don't think it is, since you are not actually + // sending the token over the internet in this case, assuming the auth URL is secure. + && (document.location.hostname !== "localhost" && document.location.hostname !== "127.0.0.1") + ) { + return cb(new Error("Cannot authenticate over insecure protocol!")); + } + + const authURL = new URL(this.auth_url); + authURL.searchParams.set('client_id', this.appID); + authURL.searchParams.set('redirect_uri', document.location.origin + document.location.pathname); + authURL.searchParams.set('response_type', 'token'); + authURL.searchParams.set('scope', options.scope); + authURL.searchParams.set('state', createNonce()); + + document.location.assign(authURL.href); + } + + /** + * Complete authentication if we were redirected back to from the provider. + */ + completeAuth(cb) { + const hashParams = new URLSearchParams(document.location.hash.replace(/^#?\/?/, '')); + if (!hashParams.has("access_token") && !hashParams.has("error")) { + return; + } + // Remove tokens from hash so that token does not remain in browser history. 
/**
 * Zero-pad `num` rendered in `base` to the width of a 32-bit value.
 *
 * NOTE(review): the width formula `32 / Math.sqrt(base)` is only exact for
 * base 16 (8 hex digits per 32-bit word); other bases yield a non-integer
 * width. Confirm whether any caller ever passes a base other than 16.
 */
const padNumber = (num, base) => {
  const padLen = 32 / Math.sqrt(base);
  const str = num.toString(base);
  // BUG FIX: previously `('0' * padLen)` — string * number is NaN in
  // JavaScript, so every padded value began with the text "NaN". Use
  // String.prototype.repeat for the zero prefix instead.
  return ('0'.repeat(padLen) + str).slice(-padLen);
};
- rng = function() { - const r = Math.random() * 0x100000000; - const _rnds = r >>> 0; - return _rnds; - }; -} +export function randomStr(len = 256) { + const _rnds = new Uint32Array(Math.ceil(len / 32)); + window.crypto.getRandomValues(_rnds); -export function randomStr() { - return rng().toString(36); -} + const str = _rnds.reduce((agg, val) => (agg + padNumber(val, 16)), ''); -export default rng; + return str.slice(-len); +} \ No newline at end of file diff --git a/src/lib/unsentRequest.js b/src/lib/unsentRequest.js new file mode 100644 index 000000000000..19f3cec7156f --- /dev/null +++ b/src/lib/unsentRequest.js @@ -0,0 +1,79 @@ +import { fromJS, List, Map } from 'immutable'; +import { curry, flow, isString } from "lodash"; + +const decodeParams = paramsString => List(paramsString.split("&")) + .map(s => List(s.split("=")).map(decodeURIComponent)) + .update(Map); + +const fromURL = wholeURL => { + const [url, allParamsString] = wholeURL.split("?"); + return Map({ url, ...(allParamsString ? { params: decodeParams(allParamsString) } : {}) }); +}; + +const encodeParams = params => params.entrySeq() + .map(([k, v]) => `${ encodeURIComponent(k) }=${ encodeURIComponent(v) }`) + .join("&"); + +const toURL = req => `${ req.get("url") }${ req.get("params") ? `?${ encodeParams(req.get("params")) }` : "" }`; + +const toFetchArguments = req => [toURL(req), req.delete("url").delete("params").toJS()]; + +const maybeRequestArg = req => { + if (isString(req)) { return fromURL(req); } + if (req) { return fromJS(req); } + return Map(); +}; +const ensureRequestArg = func => req => func(maybeRequestArg(req)); +const ensureRequestArg2 = func => (arg, req) => func(arg, maybeRequestArg(req)); + +// This actually performs the built request object +const performRequest = ensureRequestArg(req => fetch(...toFetchArguments(req))); + +// Each of the following functions takes options and returns another +// function that performs the requested action on a request. 
// Each of the following functions takes options and returns another
// function that performs the requested action on a request. They each
// default to containing an empty object, so you can simply call them
// without arguments to generate a request with only those properties.
const getCurriedRequestProcessor = flow([ensureRequestArg2, curry]);
const getPropSetFunctions = path => [
  // set unconditionally / set only when not already present
  getCurriedRequestProcessor((val, req) => req.setIn(path, val)),
  getCurriedRequestProcessor((val, req) => (req.getIn(path) ? req : req.setIn(path, val))),
];
const getPropMergeFunctions = path => [
  // new values win / existing values win
  getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p = Map()) => p.merge(obj))),
  getCurriedRequestProcessor((obj, req) => req.updateIn(path, (p = Map()) => Map(obj).merge(p))),
];

const [withMethod, withDefaultMethod] = getPropSetFunctions(["method"]);
// BUG FIX: this previously read getPropSetFunctions(["method"]), so
// withBody/withDefaultBody silently overwrote the HTTP method instead
// of setting the request body.
const [withBody, withDefaultBody] = getPropSetFunctions(["body"]);
const [withParams, withDefaultParams] = getPropMergeFunctions(["params"]);
const [withHeaders, withDefaultHeaders] = getPropMergeFunctions(["headers"]);

// withRoot sets a root URL, unless the URL is already absolute
const absolutePath = new RegExp('^(?:[a-z]+:)?//', 'i');
const withRoot = getCurriedRequestProcessor((root, req) => req.update("url", p => {
  if (absolutePath.test(p)) { return p; }
  // Insert a separating slash only when neither side already provides one.
  return (root && p && p[0] !== "/" && root[root.length - 1] !== "/")
    ? `${ root }/${ p }`
    : `${ root }${ p }`;
}));
// Since pagination can be used for a variety of views (collections
// and searches are the most common examples), cursors are namespaced
// by their type before being stored in the state, so different views
// cannot clobber each other's cursors.
export const selectCollectionEntriesCursor = (state, collectionName) =>
  new Cursor(state.getIn(["cursorsByType", "collectionEntries", collectionName]));

const cursors = (state = fromJS({ cursorsByType: { collectionEntries: {} } }), action) => {
  // Only a successful entries load updates a collection's cursor.
  if (action.type === ENTRIES_SUCCESS) {
    return state.setIn(
      ["cursorsByType", "collectionEntries", action.payload.collection],
      Cursor.create(action.payload.cursor).store
    );
  }
  return state;
};

export default cursors;
state.withMutations((map) => { loadedEntries.forEach(entry => ( @@ -41,7 +43,9 @@ const entries = (state = Map({ entities: Map(), pages: Map() }), action) => { const ids = List(loadedEntries.map(entry => entry.slug)); map.setIn(['pages', collection], Map({ page, - ids: (!page || page === 0) ? ids : map.getIn(['pages', collection, 'ids'], List()).concat(ids), + ids: append + ? map.getIn(['pages', collection, 'ids'], List()).concat(ids) + : ids, })); }); diff --git a/src/reducers/index.js b/src/reducers/index.js index 2dc4334868d1..3c2c4ba7f74b 100644 --- a/src/reducers/index.js +++ b/src/reducers/index.js @@ -2,6 +2,7 @@ import auth from './auth'; import config from './config'; import integrations, * as fromIntegrations from './integrations'; import entries, * as fromEntries from './entries'; +import cursors from './cursors'; import editorialWorkflow, * as fromEditorialWorkflow from './editorialWorkflow'; import entryDraft from './entryDraft'; import collections from './collections'; @@ -17,6 +18,7 @@ const reducers = { search, integrations, entries, + cursors, editorialWorkflow, entryDraft, mediaLibrary, diff --git a/src/reducers/search.js b/src/reducers/search.js index db73abe85209..b5a31420661d 100644 --- a/src/reducers/search.js +++ b/src/reducers/search.js @@ -38,7 +38,7 @@ const entries = (state = defaultState, action) => { map.set('isFetching', false); map.set('page', page); map.set('term', searchTerm); - map.set('entryIds', page === 0 ? entryIds : map.get('entryIds', List()).concat(entryIds)); + map.set('entryIds', (!page || isNaN(page) || page === 0) ? 
import { fromJS, Map, Set } from "immutable";

// Coerce any plain JS value to an immutable Map, treating `undefined`
// as an empty Map. Throws for values (arrays, scalars) that cannot be
// represented as a Map.
const jsToMap = obj => {
  if (obj === undefined) {
    return Map();
  }
  const immutableObj = fromJS(obj);
  if (!Map.isMap(immutableObj)) {
    throw new Error("Object must be equivalent to a Map.");
  }
  return immutableObj;
};

// Only these meta keys survive cursor creation; anything else is
// dropped so backends cannot smuggle arbitrary state through `meta`.
const knownMetaKeys = Set(["index", "count", "pageSize", "pageCount", "usingOldPaginationAPI"]);
const filterUnknownMetaKeys = meta => meta.filter((v, k) => knownMetaKeys.has(k));

/*
  createCursorMap takes one of three signatures:
  - () -> cursor with empty actions, data, and meta
  - (cursorMap) -> cursor
  - (actions, data, meta) -> cursor
*/
const createCursorMap = (...args) => {
  const { actions, data, meta } = args.length === 1
    ? jsToMap(args[0]).toObject()
    : { actions: args[0], data: args[1], meta: args[2] };
  return Map({
    // actions are a Set, rather than a List, to ensure an efficient .has
    actions: Set(actions),

    // data and meta are Maps
    data: jsToMap(data),
    meta: jsToMap(meta).update(filterUnknownMetaKeys),
  });
};

const hasAction = (cursorMap, action) => cursorMap.hasIn(["actions", action]);

// Map each available action to the result of `handler(action)`.
const getActionHandlers = (cursorMap, handler) =>
  cursorMap.get("actions", Set()).toMap().map(action => handler(action));

// The cursor logic is entirely functional, so this class simply
// provides a chainable interface.
export default class Cursor {
  static create(...args) {
    return new Cursor(...args);
  }

  constructor(...args) {
    // Passing an existing Cursor through is a no-op: a JS constructor
    // may return an arbitrary object, which overrides `this`.
    if (args[0] instanceof Cursor) {
      return args[0];
    }

    this.store = createCursorMap(...args);
    this.actions = this.store.get("actions");
    this.data = this.store.get("data");
    this.meta = this.store.get("meta");
  }

  // Every mutator returns a fresh Cursor; the receiver is never modified.
  updateStore(...args) {
    return new Cursor(this.store.update(...args));
  }
  updateInStore(...args) {
    return new Cursor(this.store.updateIn(...args));
  }

  hasAction(action) {
    return hasAction(this.store, action);
  }
  addAction(action) {
    return this.updateStore("actions", actions => actions.add(action));
  }
  removeAction(action) {
    return this.updateStore("actions", actions => actions.delete(action));
  }
  setActions(actions) {
    return this.updateStore(store => store.set("actions", Set(actions)));
  }
  mergeActions(actions) {
    return this.updateStore("actions", oldActions => oldActions.union(actions));
  }
  getActionHandlers(handler) {
    return getActionHandlers(this.store, handler);
  }

  setData(data) {
    return new Cursor(this.store.set("data", jsToMap(data)));
  }
  mergeData(data) {
    return new Cursor(this.store.mergeIn(["data"], jsToMap(data)));
  }
  // Stash the current data under "wrapped_cursor_data" so a caller can
  // layer its own data on top and later recover the original.
  wrapData(data) {
    return this.updateStore("data", oldData => jsToMap(data).set("wrapped_cursor_data", oldData));
  }
  // Returns [outerData, innerCursor]: the caller's own data with the
  // wrapper key removed, and a cursor restored to the wrapped data.
  unwrapData() {
    return [
      this.store.get("data").delete("wrapped_cursor_data"),
      this.updateStore("data", data => data.get("wrapped_cursor_data")),
    ];
  }
  clearData() {
    // The previous value is discarded, so no parameter is needed.
    return this.updateStore("data", () => Map());
  }

  setMeta(meta) {
    return this.updateStore(store => store.set("meta", jsToMap(meta)));
  }
  mergeMeta(meta) {
    return this.updateStore(store => store.update("meta", oldMeta => oldMeta.merge(jsToMap(meta))));
  }
}

// This is a temporary hack to allow cursors to be added to the
// interface between backend.js and backends without modifying old
// backends at all. This should be removed in favor of wrapping old
// backends with a compatibility layer, as part of the backend API
// refactor.
export const CURSOR_COMPATIBILITY_SYMBOL = Symbol("cursor key for compatibility with old backends");