gitlab-ce · Commit 6fb47427
authored Nov 09, 2016 by Clement Ho
Refactor tokenizer
parent 3845bf37
Showing 2 changed files with 115 additions and 95 deletions (+115 -95)

app/assets/javascripts/filtered_search/filtered_search_manager.js.es6   +25 -95
app/assets/javascripts/filtered_search/filtered_search_tokenizer.es6    +90 -0
app/assets/javascripts/filtered_search/filtered_search_manager.js.es6
@@ -18,13 +18,21 @@
       param: 'name[]',
     }];
 
+  function clearSearch(event) {
+    event.stopPropagation();
+    event.preventDefault();
+
+    document.querySelector('.filtered-search').value = '';
+    document.querySelector('.clear-search').classList.add('hidden');
+  }
+
   function toggleClearSearchButton(event) {
-    const clearSearch = document.querySelector('.clear-search');
+    const clearSearchButton = document.querySelector('.clear-search');
 
     if (event.target.value) {
-      clearSearch.classList.remove('hidden');
+      clearSearchButton.classList.remove('hidden');
     } else {
-      clearSearch.classList.add('hidden');
+      clearSearchButton.classList.add('hidden');
     }
   }

@@ -74,105 +82,24 @@
   class FilteredSearchManager {
     constructor() {
+      this.tokenizer = new gl.FilteredSearchTokenizer(validTokenKeys);
       this.bindEvents();
       loadSearchParamsFromURL();
-      this.clearTokens();
     }
 
     bindEvents() {
-      const input = document.querySelector('.filtered-search');
-      const clearSearch = document.querySelector('.clear-search');
-
-      input.addEventListener('input', this.tokenize.bind(this));
-      input.addEventListener('input', toggleClearSearchButton);
-      input.addEventListener('keydown', this.checkForEnter.bind(this));
-      clearSearch.addEventListener('click', this.clearSearch.bind(this));
+      const filteredSearchInput = document.querySelector('.filtered-search');
+
+      filteredSearchInput.addEventListener('input', this.processInput.bind(this));
+      filteredSearchInput.addEventListener('input', toggleClearSearchButton);
+      filteredSearchInput.addEventListener('keydown', this.checkForEnter.bind(this));
+      document.querySelector('.clear-search').addEventListener('click', clearSearch);
     }
 
-    clearSearch(event) {
-      event.stopPropagation();
-      event.preventDefault();
-
-      this.clearTokens();
-
-      document.querySelector('.filtered-search').value = '';
-      document.querySelector('.clear-search').classList.add('hidden');
-    }
-
-    clearTokens() {
-      this.tokens = [];
-      this.searchToken = '';
-    }
-
-    tokenize(event) {
-      // Re-calculate tokens
-      this.clearTokens();
-
-      const input = event.target.value;
-      const inputs = input.split(' ');
-      let searchTerms = '';
-      let lastQuotation = '';
-      let incompleteToken = false;
-
-      const addSearchTerm = function addSearchTerm(term) {
-        // Add space for next term
-        searchTerms += `${term} `;
-      };
-
-      inputs.forEach((i) => {
-        if (incompleteToken) {
-          const prevToken = this.tokens[this.tokens.length - 1];
-          prevToken.value += ` ${i}`;
-
-          // Remove last quotation
-          const lastQuotationRegex = new RegExp(lastQuotation, 'g');
-          prevToken.value = prevToken.value.replace(lastQuotationRegex, '');
-          this.tokens[this.tokens.length - 1] = prevToken;
-
-          // Check to see if this quotation completes the token value
-          if (i.indexOf(lastQuotation)) {
-            incompleteToken = !incompleteToken;
-          }
-
-          return;
-        }
-
-        const colonIndex = i.indexOf(':');
-
-        if (colonIndex !== -1) {
-          const tokenKey = i.slice(0, colonIndex).toLowerCase();
-          const tokenValue = i.slice(colonIndex + 1);
-
-          const match = validTokenKeys.find(v => v.key === tokenKey);
-
-          if (tokenValue.indexOf('"') !== -1) {
-            lastQuotation = '"';
-            incompleteToken = true;
-          } else if (tokenValue.indexOf('\'') !== -1) {
-            lastQuotation = '\'';
-            incompleteToken = true;
-          }
-
-          if (match && tokenValue.length > 0) {
-            this.tokens.push({
-              key: match.key,
-              value: tokenValue,
-            });
-          } else {
-            addSearchTerm(i);
-          }
-        } else {
-          addSearchTerm(i);
-        }
-      }, this);
-
-      this.searchToken = searchTerms.trim();
-      this.printTokens();
-    }
-
-    printTokens() {
-      console.log('tokens:');
-      this.tokens.forEach(token => console.log(token));
-      console.log(`search: ${this.searchToken}`);
-    }
+    processInput(event) {
+      const input = event.target.value;
+      this.tokenizer.processTokens(input);
+    }
 
     checkForEnter(event) {

@@ -193,6 +120,9 @@
       const defaultState = 'opened';
       let currentState = defaultState;
+      const tokens = this.tokenizer.getTokens();
+      const searchToken = this.tokenizer.getSearchToken();
+
       if (stateIndex !== -1) {
         const remaining = currentPath.slice(stateIndex + 6);
         const separatorIndex = remaining.indexOf('&');

@@ -201,13 +131,13 @@
       }
 
       path += `&state=${currentState}`;
-      this.tokens.forEach((token) => {
+      tokens.forEach((token) => {
         const param = validTokenKeys.find(t => t.key === token.key).param;
         path += `&${token.key}_${param}=${encodeURIComponent(token.value)}`;
       });
 
-      if (this.searchToken) {
-        path += `&search=${encodeURIComponent(this.searchToken)}`;
+      if (searchToken) {
+        path += `&search=${encodeURIComponent(searchToken)}`;
       }
 
       window.location = path;
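The net effect on the manager: token parsing and token state move out of FilteredSearchManager into the new FilteredSearchTokenizer, and the manager only forwards the raw input and reads the parsed results back when building the search URL. A minimal sketch of that interaction, using only calls that appear in this diff; the sample input string, and the assumption that 'author' is one of the configured validTokenKeys, are illustrative rather than taken from the commit:

  // Sketch only: mirrors what processInput() and the URL-building code now do.
  // Assumes 'author' is among the validTokenKeys the manager passes in.
  const tokenizer = new gl.FilteredSearchTokenizer(validTokenKeys);

  // processInput(event) forwards the raw field value on every 'input' event:
  tokenizer.processTokens('author:root fix login');

  // The URL-building code then reads the parsed state back instead of
  // keeping its own this.tokens / this.searchToken copies:
  const tokens = tokenizer.getTokens();           // e.g. [{ key: 'author', value: 'root' }]
  const searchToken = tokenizer.getSearchToken(); // e.g. 'fix login'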
app/assets/javascripts/filtered_search/filtered_search_tokenizer.es6 (new file, 0 → 100644)
/* eslint-disable no-param-reassign */
((global) => {
  class FilteredSearchTokenizer {
    constructor(validTokenKeys) {
      this.validTokenKeys = validTokenKeys;
      this.resetTokens();
    }

    getTokens() {
      return this.tokens;
    }

    getSearchToken() {
      return this.searchToken;
    }

    resetTokens() {
      this.tokens = [];
      this.searchToken = '';
    }

    printTokens() {
      console.log('tokens:');
      this.tokens.forEach(token => console.log(token));
      console.log(`search: ${this.searchToken}`);
    }

    processTokens(input) {
      // Re-calculate tokens
      this.resetTokens();

      const inputs = input.split(' ');
      let searchTerms = '';
      let lastQuotation = '';
      let incompleteToken = false;

      inputs.forEach((i) => {
        if (incompleteToken) {
          const prevToken = this.tokens[this.tokens.length - 1];
          prevToken.value += ` ${i}`;

          // Remove last quotation
          const lastQuotationRegex = new RegExp(lastQuotation, 'g');
          prevToken.value = prevToken.value.replace(lastQuotationRegex, '');
          this.tokens[this.tokens.length - 1] = prevToken;

          // Check to see if this quotation completes the token value
          if (i.indexOf(lastQuotation)) {
            incompleteToken = !incompleteToken;
          }

          return;
        }

        const colonIndex = i.indexOf(':');

        if (colonIndex !== -1) {
          const tokenKey = i.slice(0, colonIndex).toLowerCase();
          const tokenValue = i.slice(colonIndex + 1);

          const match = this.validTokenKeys.find(v => v.key === tokenKey);

          if (tokenValue.indexOf('"') !== -1) {
            lastQuotation = '"';
            incompleteToken = true;
          } else if (tokenValue.indexOf('\'') !== -1) {
            lastQuotation = '\'';
            incompleteToken = true;
          }

          if (match && tokenValue.length > 0) {
            this.tokens.push({
              key: match.key,
              value: tokenValue,
            });

            return;
          }
        }

        // Add space for next term
        searchTerms += `${i} `;
      }, this);

      this.searchToken = searchTerms.trim();
      this.printTokens();
    }
  }

  global.FilteredSearchTokenizer = FilteredSearchTokenizer;
})(window.gl || (window.gl = {}));
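For illustration, a hedged walk-through of processTokens() on a made-up input; the token keys, param values, and input string below are assumptions for the example, not part of the commit:

  // Hypothetical configuration: assumes these keys would appear in the
  // validTokenKeys array that FilteredSearchManager passes to the constructor.
  const tokenizer = new gl.FilteredSearchTokenizer([
    { key: 'author', param: 'username' },
    { key: 'label', param: 'name[]' },
  ]);

  tokenizer.processTokens('author:root label:"needs review" broken layout');

  // Given the logic above:
  //   'author:root'    -> token { key: 'author', value: 'root' }
  //   'label:"needs'   -> opens a quoted value; the following 'review"' is
  //                       appended and the quotes stripped, giving
  //                       { key: 'label', value: 'needs review' }
  //   'broken layout'  -> collected into the plain search term
  tokenizer.getTokens();      // [{ key: 'author', ... }, { key: 'label', ... }]
  tokenizer.getSearchToken(); // 'broken layout'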