Aurel / jio / Commits / 4c418987

Commit 4c418987 authored Jun 10, 2013 by Tristan Cavelier
indexstorage.js: reworked to manage queries, increase speed and to fix some bug
parent c65d2469
Showing 2 changed files with 867 additions and 1265 deletions (+867 -1265)
src/jio.storage/indexstorage.js   +598 -856
test/jiotests.js                  +269 -409
src/jio.storage/indexstorage.js  (view file @ 4c418987)
/*
 * Copyright 2013, Nexedi SA
 * Released under the LGPL license.
 * http://www.gnu.org/licenses/lgpl.html
 */
/*jslint indent: 2, maxlen: 80, sloppy: true, nomen: true, regexp: true */
/*global jIO: true, localStorage: true, define: true, complex_queries: true */
/**
 * JIO Index Storage.
 * Manages indexes for specified storages.
 * Description:
 * {
 *   "type": "index",
 *   "indices": [{
 *     "id": "index_title_subject.json", // doc id where to store indices
 *     "index": ["title", "subject"] // metadata to index
 *     "sub_storage": <sub storage where to store index>
 *                    (default equal to parent sub_storage field)
 *   }, {
 *     "id": "index_year.json",
 *     "index": "year"
 *     ...
 *   }],
 *   "sub_storage": <sub storage description>
 * }
 *
 * Sent document metadata will be:
 * index_title_subject.json
 * {
 *   "_id": "index_title_subject.json",
 *   "indexing": ["title", "subject"],
 *   "free": [0],
 *   "location": {
 *     "foo": 1,
 *     "bar": 2,
 *     ...
 *   },
 *   "database": [
 *     {},
 *     {"_id": "foo", "title": "...", "subject": ...},
 *     {"_id": "bar", "title": "...", "subject": ...},
 *     ...
 *   ]
 * }
 *
 * index_year.json
 * {
 *   "_id": "index_year.json",
 *   "indexing": ["year"],
 *   "free": [1],
 *   "location": {
 *     "foo": 0,
 *     "bar": 2,
 *     ...
 *   },
 *   "database": [
 *     {"_id": "foo", "year": "..."},
 *     {},
 *     {"_id": "bar", "year": "..."},
 *     ...
 *   ]
 * }
 *
 * A put document will be indexed to the free location if one exists, else it
 * will be indexed at the end of the database. The document id will also be
 * indexed in 'location' to quickly replace metadata.
 *
 * Only one or two loops are executed:
 * - one to filter the retrieved document list (no query -> no loop)
 * - one to format the result to a JIO response
 */
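
For illustration, a storage description following the comment above could be
passed to jIO roughly like this (a hedged sketch: the newJio factory call and
the "local" sub storage parameters mirror the test suite further down, the
index ids and document values are made up; note that the comment says
"type": "index" while the code below registers the storage as "indexed",
which is what the tests actually use):

// Hypothetical example: an index storage wrapping a local storage.
// Each entry of "indices" becomes one index document in the sub storage.
var jio_instance = jIO.newJio({
  "type": "indexed",
  "indices": [
    {"id": "index_title_subject.json", "index": ["title", "subject"]},
    {"id": "index_year.json", "index": ["year"]}
  ],
  "sub_storage": {"type": "local", "username": "example_user"}
});

// Documents are forwarded to the sub storage and mirrored into both
// index documents ("location" remembers each id's slot in "database").
jio_instance.post({"_id": "foo", "title": "My Title", "year": 2000},
                  function (err, response) {
  // on success, response looks like {"ok": true, "id": "foo"}
});
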
(function () {
  "use strict";

  var error_dict = {
    "Corrupted Index": {
      "status": 24,
      "statusText": "Corrupt",
      "error": "corrupt",
      "reason": "corrupted index database"
    },
    "Corrupted Metadata": {
      "status": 24,
      "statusText": "Corrupt",
      "error": "corrupt",
      "reason": "corrupted document"
    },
    "Not Found": {
      "status": 404,
      "statusText": "Not Found",
      "error": "not_found",
      "reason": "missing document"
    },
    "Conflict": {
      "status": 409,
      "statusText": "Conflicts",
      "error": "conflicts",
      "reason": "already exist"
    }
  };

  /**
   * Generate a JIO Error Object
   *
   * @method generateErrorObject
   * @param  {String} name The error name
   * @param  {String} message The error message
   * @param  {String} [reason] The error reason
   * @return {Object} A jIO error object
   */
  function generateErrorObject(name, message, reason) {
    if (!error_dict[name]) {
      return {
        "status": 0,
        "statusText": "Unknown",
        "error": "unknown",
        "message": message,
        "reason": reason || "unknown"
      };
    }
    return {
      "status": error_dict[name].status,
      "statusText": error_dict[name].statusText,
      "error": error_dict[name].error,
      "message": message,
      "reason": reason || error_dict[name].reason
    };
  }

  /**
   * Get the real type of an object
   * @method type
   * @param  {Any} value The value to check
   * @return {String} The value type
   */
  function type(value) {
    // returns "String", "Object", "Array", "RegExp", ...
    return (/^\[object ([a-zA-Z]+)\]$/).exec(
      Object.prototype.toString.call(value)
    )[1];
  }

  /**
   * Generate a new uuid
   * @method generateUuid
   * @return {string} The new uuid
   */
  function generateUuid() {
    var S4 = function () {
      var i, string = Math.floor(
        Math.random() * 0x10000 /* 65536 */
...
@@ -98,610 +162,428 @@ jIO.addStorageType('indexed', function (spec, my) {
      S4() + "-" + S4() + "-" + S4() + "-" + S4() + "-" +
      S4() + S4() + S4();
  }

  /**
   * A JSON Index manipulator
   *
   * @class JSONIndex
   * @constructor
   */
  function JSONIndex(spec) {
    var that = this;
    spec = spec || {};

    /**
     * The document id
     *
     * @property _id
     * @type String
     */
    that._id = spec._id;

    /**
     * The array with metadata key to index
     *
     * @property _indexing
     * @type Array
     */
    that._indexing = spec.indexing || [];

    /**
     * The array of free location index
     *
     * @property _free
     * @type Array
     * @default []
     */
    that._free = spec.free || [];

    /**
     * The dictionnary document id -> database index
     *
     * @property _location
     * @type Object
     * @default {}
     */
    that._location = spec.location || {};

    /**
     * The database array containing document metadata
     *
     * @property _database
     * @type Array
     * @default []
     */
    that._database = spec.database || [];

    /**
     * Adds a metadata object in the database, replace if already exist
     *
     * @method put
     * @param  {Object} meta The metadata to add
     * @return {Boolean} true if added, false otherwise
     */
    that.put = function (meta) {
      var underscored_meta_re = /^_.*$/, k, needed_meta = {}, ok = false;
      if (typeof meta._id !== "string" && meta._id !== "") {
        throw new TypeError("Corrupted Metadata");
      }
      for (k in meta) {
        if (meta.hasOwnProperty(k)) {
          if (underscored_meta_re.test(k)) {
            needed_meta[k] = meta[k];
          } else if (that._indexing_object[k]) {
            needed_meta[k] = meta[k];
            ok = true;
          }
        }
      }
      if (ok) {
        if (typeof that._location[meta._id] === "number") {
          that._database[that._location[meta._id]] = needed_meta;
        } else if (that._free.length > 0) {
          k = that._free.shift();
          that._database[k] = needed_meta;
          that._location[meta._id] = k;
        } else {
          that._database.push(needed_meta);
          that._location[meta._id] = that._database.length - 1;
        }
        return true;
      }
      if (typeof that._location[meta._id] === "number") {
        that.remove(meta);
      }
      return false;
    };

    /**
     * Removes a metadata object from the database if exist
     *
     * @method remove
     * @param  {Object} meta The metadata to remove
     */
    that.remove = function (meta) {
      if (typeof meta._id !== "string") {
        throw new TypeError("Corrupted Metadata");
      }
      if (typeof that._location[meta._id] !== "number") {
        throw new ReferenceError("Not Found");
      }
      that._database[that._location[meta._id]] = null;
      that._free.push(that._location[meta._id]);
      delete that._location[meta._id];
    };

    /**
     * Checks if the index document is correct
     *
     * @method check
     */
    that.check = function () {
      var id, database_meta;
      if (typeof that._id !== "string" ||
          that._id === "" ||
          type(that._free) !== "Array" ||
          type(that._indexing) !== "Array" ||
          type(that._location) !== "Object" ||
          type(that._database) !== "Array" ||
          that._indexing.length === 0) {
        throw new TypeError("Corrupted Index");
      }
      for (id in that._location) {
        if (that._location.hasOwnProperty(id)) {
          database_meta = that._database[that._location[id]];
          if (type(database_meta) !== "Object" ||
              database_meta._id !== id) {
            throw new TypeError("Corrupted Index");
          }
        }
      }
    };

    /**
     * Recreates database indices and remove free space
     *
     * @method repair
     */
    that.repair = function () {
      var i = 0, meta;
      that._free = [];
      that._location = {};
      if (type(that._database) !== "Array") {
        that._database = [];
      }
      while (i < that._database.length) {
        meta = that._database[i];
        if (type(meta) === "Object" &&
            typeof meta._id === "string" && meta._id !== "" &&
            !that._location[meta._id]) {
          that._location[meta._id] = i;
          i += 1;
        } else {
          that._database.splice(i, 1);
        }
      }
    };

    /**
     * Returns the serialized version of this object (not cloned)
     *
     * @method serialized
     * @return {Object} The serialized version
     */
    that.serialized = function () {
      return {
        "_id": that._id,
        "indexing": that._indexing,
        "free": that._free,
        "location": that._location,
        "database": that._database
      };
    };

    that.check();
    that._indexing_object = {};
    that._indexing.forEach(function (meta_key) {
      that._indexing_object[meta_key] = true;
    });
  }

(removed here: the previous helpers priv.getObjectSize,
priv.createEmptyIndexArray, priv.searchIndexByValue, priv.getPositionInArray,
priv.isDocidInIndex, priv.cleanIndices and priv.updateIndices)
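
A hedged usage sketch of the JSONIndex manipulator defined above, showing how
put, remove and the free list interact (standalone snippet, document values
made up):

// Build an empty index over "author", then exercise it.
var db = new JSONIndex({"_id": "A", "indexing": ["author"]});

db.put({"_id": "doc1", "author": "John Doe", "title": "not indexed"}); // true
db.put({"_id": "doc2", "author": "Jane Doe"});                         // true
db.remove({"_id": "doc1"}); // slot 0 is pushed onto the free list

db.serialized();
// -> {"_id": "A", "indexing": ["author"], "free": [0],
//     "location": {"doc2": 1},
//     "database": [null, {"_id": "doc2", "author": "Jane Doe"}]}

db.put({"_id": "doc3", "author": "Mrs Sunshine"}); // reuses free slot 0
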
  /**
   * The JIO index storage constructor
   */
  function indexStorage(spec, my) {
    var that, priv = {};

    that = my.basicStorage(spec, my);

    priv.indices = spec.indices;
    priv.sub_storage = spec.sub_storage;

    // Overrides

    that.specToStore = function () {
      return {
        "indices": priv.indices,
        "sub_storage": priv.sub_storage
      };
    };

    /**
     * Return the similarity percentage (1 >= p >= 0) between two index lists.
     *
     * @method similarityPercentage
     * @param  {Array} list_a An index list
     * @param  {Array} list_b Another index list
     * @return {Number} The similarity percentage
     */
    priv.similarityPercentage = function (list_a, list_b) {
      var ai, bi, count = 0;
      for (ai = 0; ai < list_a.length; ai += 1) {
        for (bi = 0; bi < list_b.length; bi += 1) {
          if (list_a[ai] === list_b[bi]) {
            count += 1;
          }
        }
      }
      return count / (list_a.length > list_b.length ?
                      list_a.length : list_b.length);
    };

    /**
     * Select the good index to use according to a select list.
     *
     * @method selectIndex
     * @param  {Array} select_list An array of strings
     * @return {Number} The index index
     */
    priv.selectIndex = function (select_list) {
      var i, tmp, selector = {"index": 0, "similarity": 0};
      for (i = 0; i < priv.indices.length; i += 1) {
        tmp = priv.similarityPercentage(select_list,
                                        priv.indices[i].index);
        if (tmp > selector.similarity) {
          selector.index = i;
          selector.similarity = tmp;
        }
      }
      return selector.index;
    };

(removed here: priv.findBestIndexForQuery and priv.constructQueryObject, the
previous query helpers)
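
A worked example of the selection logic above, using index lists of the shape
found in the test suite below (the numbers are just the formula applied by
hand):

// With priv.indices = [{"id": "A", "index": ["author"]},
//                      {"id": "B", "index": ["title", "year"]}]:
//
// similarityPercentage(["year"], ["author"])         -> 0 / 1 = 0
// similarityPercentage(["year"], ["title", "year"])  -> 1 / 2 = 0.5
//
// so selectIndex(["year"]) returns 1 and index "B" is the one used to
// answer a request that only selects "year".
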
    /**
     * Get a database
     *
     * @method getIndexDatabase
     * @param  {Object} option The command option
     * @param  {Number} number The location in priv.indices
     * @param  {Function} callback The callback
     */
    priv.getIndexDatabase = function (option, number, callback) {
      that.addJob(
        "get",
        priv.indices[number].sub_storage || priv.sub_storage,
        {"_id": priv.indices[number].id},
        option,
        function (response) {
          callback(new JSONIndex(response));
        },
        function (err) {
          if (err.status === 404) {
            callback(new JSONIndex({
              "_id": priv.indices[number].id,
              "indexing": priv.indices[number].index
            }));
            return;
          }
          err.message = "Unable to get index database.";
          that.error(err);
        }
      );
    };

    /**
     * Gets a list containing all the databases set in the storage
     * description.
     *
     * @method getIndexDatabaseList
     * @param  {Object} option The command option
     * @param  {Function} callback The result callback(database_list)
     */
    priv.getIndexDatabaseList = function (option, callback) {
      var i, count = 0, callbacks = {}, response_list = [];
      callbacks.error = function (index) {
        return function (err) {
          if (err.status === 404) {
            response_list[index] = new JSONIndex({
              "_id": priv.indices[index].id,
              "indexing": priv.indices[index].index
            });
            count += 1;
            if (count === priv.indices.length) {
              callback(response_list);
            }
            return;
          }
          err.message = "Unable to get index database.";
          that.error(err);
        };
      };
      callbacks.success = function (index) {
        return function (response) {
          response_list[index] = new JSONIndex(response);
          count += 1;
          if (count === priv.indices.length) {
            callback(response_list);
          }
        };
      };
      for (i = 0; i < priv.indices.length; i += 1) {
        that.addJob(
          "get",
          priv.indices[i].sub_storage || priv.sub_storage,
          {"_id": priv.indices[i].id},
          option,
          callbacks.success(i),
          callbacks.error(i)
        );
      }
    };

    /**
     * Saves all the databases to the remote(s).
     *
     * @method storeIndexDatabaseList
     * @param  {Array} database_list The database list
     * @param  {Object} option The command option
     * @param  {Function} callback The result callback(err, response)
     */
    priv.storeIndexDatabaseList = function (database_list, option, callback) {
      var i, count = 0, onResponse, onError;
      onResponse = function (response) {
        count += 1;
        if (count === priv.indices.length) {
          callback({"ok": true});
        }
      };
      onError = function (err) {
        err.message = "Unable to store index database.";
        that.error(err);
      };
      for (i = 0; i < priv.indices.length; i += 1) {
        that.addJob(
          "put",
          priv.indices[i].sub_storage || priv.sub_storage,
          database_list[i].serialized(),
          option,
          onResponse,
          onError
        );
      }
    };

(removed here: priv.allDocsResponseFromIndex, the previous allDocs response
builder)
    /**
     * A generic request method which delegates the request to the sub
     * storage. On response, it will index the document from the request and
     * update all the databases.
     *
     * @method genericRequest
     * @param  {Command} command The JIO command
     * @param  {Function} method The request method
     */
    priv.genericRequest = function (command, method) {
      var doc = command.cloneDoc(), option = command.cloneOption();
      that.addJob(
        method,
        priv.sub_storage,
        doc,
        option,
        function (response) {
          switch (method) {
          case "post":
          case "put":
          case "remove":
            doc._id = response.id;
            priv.getIndexDatabaseList(option, function (database_list) {
              var i;
              switch (method) {
              case "post":
              case "put":
                for (i = 0; i < database_list.length; i += 1) {
                  database_list[i].put(doc);
                }
                break;
              case "remove":
                for (i = 0; i < database_list.length; i += 1) {
                  database_list[i].remove(doc);
                }
                break;
              default:
                break;
              }
              priv.storeIndexDatabaseList(database_list, option, function () {
                that.success({"ok": true, "id": doc._id});
              });
            });
            break;
          default:
            that.success(response);
            break;
          }
        },
        function (err) {
          // xxx do we try to delete the posted document ?
          return that.error(err);
        }
      );
    };

(removed here: priv.postOrPut and its postDocument / sendIndices helpers)
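
To make the delegation above concrete, a hedged trace of a single remove()
call (hypothetical snippet; my_jio and the callback style follow the test
suite below):

// 1. The "remove" job is forwarded to priv.sub_storage.
// 2. On success, every index database is loaded (getIndexDatabaseList),
//    JSONIndex#remove drops the document and frees its slot, and
//    storeIndexDatabaseList writes the databases back.
my_jio.remove({"_id": "removeAlso"}, function (err, response) {
  // response is {"ok": true, "id": "removeAlso"} once the document and
  // its index entries are gone
});
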
    /**
     * Post the document metadata and update the index
     * @method post
     * @param  {object} command The JIO command
     */
    that.post = function (command) {
      priv.genericRequest(command, 'post');
    };

    /**
...
@@ -710,7 +592,7 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @param {object} command The JIO command
     */
    that.put = function (command) {
      priv.genericRequest(command, 'put');
    };

    /**
...
@@ -719,7 +601,7 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @param {object} command The JIO command
     */
    that.putAttachment = function (command) {
      priv.genericRequest(command, 'putAttachment');
    };

    /**
...
@@ -728,18 +610,7 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @param {object} command The JIO command
     */
    that.get = function (command) {
      priv.genericRequest(command, 'get');
    };

    /**
...
@@ -748,18 +619,7 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @param {object} command The JIO command
     */
    that.getAttachment = function (command) {
      priv.genericRequest(command, 'getAttachment');
    };

    /**
...
@@ -768,129 +628,16 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @param {object} command The JIO command
     */
    that.remove = function (command) {
      priv.genericRequest(command, 'remove');
    };

    /**
     * Remove attachment
     * @method removeAttachment
     * @param {object} command The JIO command
     */
    that.removeAttachment = function (command) {
      priv.genericRequest(command, 'removeAttachment');
    };

    /**
...
@@ -900,61 +647,56 @@ jIO.addStorageType('indexed', function (spec, my) {
     * @method allDocs
     * @param {object} command The JIO command
     */
    //{
    // "total_rows": 4,
    // "rows": [
    //  {
    //    "id": "otherdoc",
    //    "key": "otherdoc",
    //    "value": {
    //      "rev": "1-3753476B70A49EA4D8C9039E7B04254C"
    //    }
    //  },{...}
    // ]
    //}
    that.allDocs = function (command) {
      var option = command.cloneOption(),
        index = priv.selectIndex(option.select_list || []);
      // Include docs option is ignored, if you want to get all the document,
      // don't use index storage!

      option.select_list = option.select_list || [];
      option.select_list.push("_id");
      priv.getIndexDatabase(option, index, function (db) {
        var i, id;
        db = db._database;
        complex_queries.QueryFactory.create(option.query || '').
          exec(db, option);
        for (i = 0; i < db.length; i += 1) {
          id = db[i]._id;
          delete db[i]._id;
          db[i] = {
            "id": id,
            "key": id,
            "value": db[i],
          };
        }
        that.success({"total_rows": db.length, "rows": db});
      });
    };

    // that.repair = function (command) {
    //   // todo: repair
    //   // easy but don't have time
    //   // if _id is an index id, then repair the index by doing an
    //   // allDocs and recreating the database from scratch. end.
    // };

    return that;
  }

  if (typeof exports === "object") {
    // nodejs export module
    Object.defineProperty(exports, "indexStorage", {
      configurable: false,
      enumerable: true,
      writable: false,
      value: indexStorage
    });
  } else if (typeof define === "function" && define.amd) {
    // requirejs export
    define(indexStorage);
  } else {
    // classical browser and web workers JIO export
    jIO.addStorageType("indexed", indexStorage);
  }
}());

(removed here: the old per-method implementations of get, getAttachment,
remove, removeAttachment and allDocs, which maintained the single
"<application_name>_indices.json" file through cleanIndices,
allDocsResponseFromIndex and jIO.ComplexQueries)
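
The new allDocs above answers queries entirely from one index database,
chosen with selectIndex. A hedged usage sketch (the "query" and "select_list"
option names come from the code and tests; the query-string syntax is assumed
from jIO's complex_queries module and the concrete values are made up):

// Served from the {"id": "B", "index": ["title", "year"]} index: the
// select_list matches it best, so only that index document is fetched
// and then filtered in memory by complex_queries.
my_jio.allDocs({
  "query": 'year: "2000"',
  "select_list": ["title", "year"]
}, function (err, response) {
  // response.rows -> [{"id": "...", "key": "...",
  //                    "value": {"title": "...", "year": "..."}}, ...]
});
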
test/jiotests.js  (view file @ 4c418987)
...
@@ -4327,13 +4327,9 @@ test ("Post", function () {
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"id": "A", "index": ["title"]},
      {"id": "B", "index": ["title", "year"]}
    ],
    "sub_storage": {
      "type": "local",
      "username": "ipost",
...
@@ -4342,39 +4338,49 @@ test ("Post", function () {
  });
  // post without id
  o.spy(o, "jobstatus", "done", "Post without id");
  o.jio.post({}, function (err, response) {
    o.id = (response || {}).id;
    o.f(err, response);
  });
  o.tick(o);

  // post non empty document
  o.doc = {"_id": "some_id", "title": "My Title",
           "year": 2000, "hey": "def"};
  o.spy(o, "value", {"ok": true, "id": "some_id"}, "Post document");
  o.jio.post(o.doc, o.f);
  o.tick(o);

  // check document
  o.fakeIndexA = {
    "_id": "A",
    "indexing": ["title"],
    "free": [],
    "location": {"some_id": 0},
    "database": [{"_id": "some_id", "title": "My Title"}]
  };
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB = {
    "_id": "B",
    "indexing": ["title", "year"],
    "free": [],
    "location": {"some_id": 0},
    "database": [
      {"_id": "some_id", "title": "My Title", "year": 2000}
    ]
  };
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // post with escapable characters
...
@@ -4404,13 +4410,9 @@ test ("Put", function(){
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"id": "A", "index": ["author"]},
      {"id": "B", "index": ["year"]}
    ],
    "sub_storage": {
      "type": "local",
      "username": "iput",
...
@@ -4425,89 +4427,64 @@ test ("Put", function(){
  o.tick(o);

  // put non empty document
  o.doc = {"_id": "put1", "title": "myPut1", "author": "John Doe"};
  o.spy(o, "value", {"ok": true, "id": "put1"}, "Put-create document");
  o.jio.put(o.doc, o.f);
  o.tick(o);

  // check index file
  o.fakeIndexA = {
    "_id": "A",
    "indexing": ["author"],
    "free": [],
    "location": {"put1": 0},
    "database": [{"_id": "put1", "author": "John Doe"}]
  };
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB = {
    "_id": "B",
    "indexing": ["year"],
    "free": [],
    "location": {},
    "database": []
  };
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // modify document - modify keyword on index!
  o.doc = {"_id": "put1", "title": "myPuttter1", "author": "Jane Doe"};
  o.spy(o, "value", {"ok": true, "id": "put1"}, "Modify existing document");
  o.jio.put(o.doc, o.f);
  o.tick(o);

  // check index file
  o.fakeIndexA.database[0].author = "Jane Doe";
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  // add new document with same keyword!
  o.doc = {"_id": "new_doc", "title": "myPut2", "author": "Jane Doe"};
  o.spy(o, "value", {"ok": true, "id": "new_doc"},
        "Add new document with same keyword");
  o.jio.put(o.doc, o.f);
  o.tick(o);

  // check index file
  o.fakeIndexA.location.new_doc = 1;
  o.fakeIndexA.database.push({"_id": "new_doc", "author": "Jane Doe"});
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  // add second keyword to index file
  o.doc = {"_id": "put1", "title": "myPut2", "author": "Jane Doe",
           "year": "1912"};
  o.spy(o, "value", {"ok": true, "id": "put1"},
        "add second keyword to index file");
...
@@ -4515,26 +4492,14 @@ test ("Put", function(){
  o.tick(o);

  // check index file
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB.location.put1 = 0;
  o.fakeIndexB.database.push({"_id": "put1", "year": "1912"});
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // remove a keyword from an existing document
...
@@ -4545,26 +4510,11 @@ test ("Put", function(){
  o.tick(o);

  // check index file
  delete o.fakeIndexA.location.new_doc;
  o.fakeIndexA.database[1] = null;
  o.fakeIndexA.free.push(1);
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.jio.stop();
...
@@ -4580,13 +4530,9 @@ test ("PutAttachment", function(){
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"id": "A", "index": ["author"]},
      {"id": "B", "index": ["year"]}
    ],
    "sub_storage": {
      "type": "local",
      "username": "iputatt",
...
@@ -4697,13 +4643,9 @@ test ("Get", function(){
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"id": "A", "index": ["author"]},
      {"id": "B", "index": ["year"]}
    ],
    "sub_storage": {
      "type": "local",
      "username": "iget",
...
@@ -4767,13 +4709,9 @@ test ("Remove", function(){
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"id": "A", "index": ["author"]},
      {"id": "B", "index": ["year"]}
    ],
    "sub_storage": {
      "type": "local",
      "username": "irem",
...
@@ -4809,26 +4747,30 @@ test ("Remove", function(){
  o.tick(o);

  // check index
  o.fakeIndexA = {
    "_id": "A",
    "indexing": ["author"],
    "free": [0],
    "location": {"removeAlso": 1},
    "database": [null, {"_id": "removeAlso", "author": "Martin Mustermann"}]
  };
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB = {
    "_id": "B",
    "indexing": ["year"],
    "free": [0],
    "location": {"removeAlso": 1},
    "database": [null, {"_id": "removeAlso", "year": "2525"}]
  };
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // check document
...
@@ -4868,29 +4810,18 @@ test ("Remove", function(){
  o.tick(o);

  // check index
  o.fakeIndexA.free = [];
  o.fakeIndexA.location.remove3 = 0;
  o.fakeIndexA.database[0] = {"_id": "remove3", "author": "Mrs Sunshine"};
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB.free = [];
  o.fakeIndexB.location.remove3 = 0;
  o.fakeIndexB.database[0] = {"_id": "remove3", "year": "1234"};
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // remove document and attachment together
...
@@ -4900,26 +4831,18 @@ test ("Remove", function(){
  o.tick(o);

  // check index
  o.fakeIndexA.free = [0];
  delete o.fakeIndexA.location.remove3;
  o.fakeIndexA.database[0] = null;
  o.spy(o, "value", o.fakeIndexA, "Check index file");
  o.jio.get({"_id": "A"}, o.f);
  o.tick(o);

  o.fakeIndexB.free = [0];
  delete o.fakeIndexB.location.remove3;
  o.fakeIndexB.database[0] = null;
  o.spy(o, "value", o.fakeIndexB, "Check index file");
  o.jio.get({"_id": "B"}, o.f);
  o.tick(o);

  // check attachment
@@ -4942,13 +4865,9 @@ test ("AllDocs", function () {
...
@@ -4942,13 +4865,9 @@ test ("AllDocs", function () {
   o.jio = JIO.newJio({
     "type": "indexed",
     "indices": [
-      {"name": "indexA", "fields": ["author"]},
-      {"name": "indexAB", "fields": ["author", "year"]}
+      {"id": "A", "index": ["author"]},
+      {"id": "B", "index": ["year"]}
     ],
-    "field_types": {
-      "author": "string",
-      "year": "number"
-    },
     "sub_storage": {
       "type": "local",
       "username": "iall",
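Read together, the two sides of this hunk show the storage description moving from named multi-field indices plus "field_types" to one index document per entry. A sketch of a complete description in the new form, using only keys that appear in this test (the remaining sub_storage options are cut off above, so nothing else is assumed):

  var jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      // one index document per entry: "id" names the document that will hold
      // the index, "index" lists the metadata fields to index in it
      {"id": "A", "index": ["author"]},
      {"id": "B", "index": ["year"]}
    ],
    "sub_storage": {"type": "local", "username": "iall"}
  });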
...
@@ -4983,34 +4902,25 @@ test ("AllDocs", function () {
   o.tick(o);
   // check index
-  o.fakeIndex = {
-    "_id": "iall_indices.json",
-    "indexA": {
-      "author": {
-        "Dr. No": ["dragon.doc"],
-        "Dr. Who": ["timemachine"],
-        "Dr. Snuggles": ["rocket.ppt"],
-        "Dr. House": ["stick.jpg"]
-      }
-    },
-    "indexAB": {
-      "author": {
-        "Dr. No": ["dragon.doc"],
-        "Dr. Who": ["timemachine"],
-        "Dr. Snuggles": ["rocket.ppt"],
-        "Dr. House": ["stick.jpg"]
-      },
-      "year": {
-        "1968": ["dragon.doc", "timemachine"],
-        "1985": ["rocket.ppt"],
-        "2005": ["stick.jpg"]
-      }
-    }
-  };
-  o.jio.get({"_id": "iall_indices.json"}, function (err, response) {
-    o.actualIndex = response;
-    deepEqual(o.actualIndex, o.fakeIndex, "Check index file");
-  });
+  o.fakeIndexA = {
+    "_id": "A",
+    "indexing": ["author"],
+    "free": [],
+    "location": {
+      "dragon.doc": 0,
+      "timemachine": 1,
+      "rocket.ppt": 2,
+      "stick.jpg": 3
+    },
+    "database": [
+      {"_id": "dragon.doc", "author": "Dr. No"},
+      {"_id": "timemachine", "author": "Dr. Who"},
+      {"_id": "rocket.ppt", "author": "Dr. Snuggles"},
+      {"_id": "stick.jpg", "author": "Dr. House"}
+    ]
+  };
+  o.spy(o, "value", o.fakeIndexA, "Check index file");
+  o.jio.get({"_id": "A"}, o.f);
   o.tick(o);
   o.thisShouldBeTheAnswer = {
...
@@ -5026,19 +4936,6 @@ test ("AllDocs", function () {
   o.jio.allDocs(o.f);
   o.tick(o);
-  o.thisShouldBeTheAnswer2 = {
-    "rows": [
-      {"id": "dragon.doc", "key": "dragon.doc", "value": {}, "doc": o.all1},
-      {"id": "timemachine", "key": "timemachine", "value": {}, "doc": o.all2},
-      {"id": "rocket.ppt", "key": "rocket.ppt", "value": {}, "doc": o.all3},
-      {"id": "stick.jpg", "key": "stick.jpg", "value": {}, "doc": o.all4}
-    ],
-    "total_rows": 4
-  }
-  o.spy(o, "value", o.thisShouldBeTheAnswer2, "allDocs (include_docs)");
-  o.jio.allDocs({"include_docs": true}, o.f);
-  o.tick(o);
   o.jio.stop();
 });
...
@@ -5049,16 +4946,11 @@ test ("AllDocs Complex Queries", function () {
   o.jio = JIO.newJio({
     "type": "indexed",
     "indices": [
-      {"name": "indexA", "fields": ["director"]},
-      {"name": "indexAB", "fields": ["title", "year"]}
+      {"id": "A", "index": ["director"]},
+      {"id": "B", "index": ["title", "year"]}
       //,
       //{"name":"indexABC", "fields":["title","year","director"]}
     ],
-    "field_types": {
-      "director": "string",
-      "title": "string",
-      "year": "number"
-    },
     "sub_storage": {
       "type": "local",
       "username": "icomplex",
...
@@ -5083,7 +4975,7 @@ test ("AllDocs Complex Queries", function () {
"
Sidney Lumet
"
,
"
Christopher Nolan
"
,
"
Steven Spielberg
"
,
"
Sidney Lumet
"
,
"
Christopher Nolan
"
,
"
Steven Spielberg
"
,
"
Peter Jackson
"
,
"
David Fincher
"
,
"
Irvin Kershner
"
,
"
Peter Jackson
"
,
"
Peter Jackson
"
,
"
David Fincher
"
,
"
Irvin Kershner
"
,
"
Peter Jackson
"
,
"
Milos Forman
"
,
"
Christopher Nolan
"
,
"
Martin Scorsese
"
"
Milos Forman
"
,
"
Christopher Nolan
"
,
"
Martin Scorsese
"
]
]
;
for
(
i
=
0
;
i
<
m
;
i
+=
1
)
{
for
(
i
=
0
;
i
<
m
;
i
+=
1
)
{
o
.
fakeDoc
=
{};
o
.
fakeDoc
=
{};
...
@@ -5094,6 +4986,7 @@ test ("AllDocs Complex Queries", function () {
     o.jio.put(o.fakeDoc);
     o.clock.tick(1000);
   }
+  // o.clock.tick(1000);
   // response
   o.allDocsResponse = {};
...
@@ -5101,123 +4994,90 @@ test ("AllDocs Complex Queries", function () {
   o.allDocsResponse.total_rows = 15;
   for (i = 0; i < m; i += 1) {
     o.allDocsResponse.rows.push({
-      "id": "" + i,
-      "key": "" + i,
-      "value": {}
-    });
-  };
-  // alldocs
-  o.jio.allDocs(function (e, r) {
-    var x = r.rows.sort(o.sortArrayById('id', true, parseInt));
-    deepEqual({"total_rows": r.total_rows, "rows": x}, o.allDocsResponse,
-              "AllDocs response generated from index");
-  });
-  o.clock.tick(1000);
-  // include docs
-  o.allDocsResponse2 = {};
-  o.allDocsResponse2.rows = [];
-  o.allDocsResponse2.total_rows = 15;
-  for (i = 0; i < m; i += 1) {
-    o.allDocsResponse2.rows.push({
-      "id": "" + i,
-      "key": "" + i,
-      "value": {},
-      "doc": localstorage.getItem(o.localpath + "/" + i)
-    });
-  };
-  // alldocs
-  o.jio.allDocs({"include_docs": true}, function (e, r) {
-    var x = r.rows.sort(o.sortArrayById('id', true, parseInt));
-    deepEqual({"total_rows": r.total_rows, "rows": x}, o.allDocsResponse2,
-              "AllDocs response generated from index (include docs)");
-  });
-  o.clock.tick(1000);
+      "id": "" + i,
+      "key": "" + i,
+      "value": {},
+      "doc": {
+        "_id": "" + i,
+        "title": o.titles[i],
+        "year": o.years[i],
+        "director": o.director[i]
+      }
+    });
+  }
+  o.response = JSON.parse(JSON.stringify(o.allDocsResponse));
+  for (i = 0; i < o.response.rows.length; i += 1) {
+    delete o.response.rows[i].doc;
+  }
+  // alldocs
+  o.spy(o, "value", o.response, "AllDocs response generated from index");
+  o.jio.allDocs(o.f);
+  o.tick(o, 1000);
   // complex queries
-  o.thisShouldBeTheAnswer4 = [
-    {"title": "Inception", "year": 2010},
-    {"title": "The Dark Knight", "year": 2008},
-    {"title": "Lord of the Rings - Return of the King", "year": 2003},
-    {"title": "Lord Of the Rings - Fellowship of the Ring", "year": 2001},
-    {"title": "Fight Club", "year": 1999}
-  ];
-  o.spy(o, "value", o.thisShouldBeTheAnswer4,
-        "allDocs (complex queries year >= 1980, index used to do query)");
-  o.jio.allDocs({
-    "query": {
-      // "query":'(year: >= "1980" AND year: < "2000")',
-      "query": '(year: >= "1980")',
-      "filter": {
-        "limit": [0, 5],
-        "sort_on": [['year', 'descending']],
-        "select_list": ['title', 'year']
-      },
-      "wildcard_character": '%'
-    }
-  }, o.f);
+  o.response = JSON.parse(JSON.stringify(o.allDocsResponse));
+  i = 0;
+  while (i < o.response.rows.length) {
+    if (o.response.rows[i].year < 1980) {
+      o.response.rows.splice(i, 1);
+    } else {
+      o.response.rows[i].value = {
+        "year": o.response.rows[i].doc.year,
+        "title": o.response.rows[i].doc.title
+      };
+      delete o.response.rows[i].doc;
+      i += 1;
+    }
+  }
+  o.response.rows.sort(function (a, b) {
+    return a.value.year > b.value.year ? -1 :
+      a.value.year < b.value.year ? 1 : 0;
+  });
+  o.response.rows.length = 5;
+  o.response.total_rows = 5;
+  o.spy(o, "value", o.response,
+        "allDocs (complex queries year >= 1980, index used to do query)");
+  o.jio.allDocs({
+    // "query":'(year: >= "1980" AND year: < "2000")',
+    "query": '(year: >= "1980")',
+    "limit": [0, 5],
+    "sort_on": [['year', 'descending']],
+    "select_list": ['title', 'year']
+  }, o.f);
   o.tick(o);
   // complex queries
-  o.thisShouldBeTheAnswer5 = [
-    {"director": "Christopher Nolan", "year": 2010},
-    {"director": "Christopher Nolan", "year": 2008},
-    {"director": "Peter Jackson", "year": 2003},
-    {"director": "Peter Jackson", "year": 2001},
-    {"director": "David Fincher", "year": 1999}
-  ];
-  o.spy(o, "value", o.thisShouldBeTheAnswer5,
-        "allDocs (complex queries year >= 1980, can't use index)");
-  o.jio.allDocs({
-    "query": {
-      // "query":'(year: >= "1980" AND year: < "2000")',
-      "query": '(year: >= "1980")',
-      "filter": {
-        "limit": [0, 5],
-        "sort_on": [['year', 'descending']],
-        "select_list": ['director', 'year']
-      },
-      "wildcard_character": '%'
-    }
-  }, o.f);
+  o.spy(o, "value", {"total_rows": 0, "rows": []},
+        "allDocs (complex queries year >= 1980, can't use index)");
+  o.jio.allDocs({
+    // "query":'(year: >= "1980" AND year: < "2000")',
+    "query": '(year: >= "1980")',
+    "limit": [0, 5],
+    "sort_on": [['year', 'descending']],
+    "select_list": ['director', 'year']
+  }, o.f);
   o.tick(o);
   // empty query returns all
-  o.thisShouldBeTheAnswer6 = [
-    {"title": "The Good, The Bad and The Ugly"},
-    {"title": "The Dark Knight"},
-    {"title": "Star Wars Episode V"},
-    {"title": "Shawshank Redemption"},
-    {"title": "Schindlers List"},
-    {"title": "Pulp Fiction"},
-    {"title": "One flew over the Cuckoo's Nest"},
-    {"title": "Lord of the Rings - Return of the King"},
-    {"title": "Lord Of the Rings - Fellowship of the Ring"},
-    {"title": "Inception"},
-    {"title": "Godfellas"},
-    {"title": "Godfather 2"},
-    {"title": "Godfather"},
-    {"title": "Fight Club"},
-    {"title": "12 Angry Men"}
-  ];
-  o.spy(o, "value", o.thisShouldBeTheAnswer6,
-        "allDocs (empty query in complex query)");
-  o.jio.allDocs({
-    "query": {
-      "filter": {
-        "sort_on": [['title', 'descending']],
-        "select_list": ['title']
-      },
-      "wildcard_character": '%'
-    }
-  }, o.f);
+  o.response = JSON.parse(JSON.stringify(o.allDocsResponse));
+  i = 0;
+  while (i < o.response.rows.length) {
+    o.response.rows[i].value.title =
+      o.response.rows[i].doc.title;
+    delete o.response.rows[i].doc;
+    i += 1;
+  }
+  o.response.rows.sort(function (a, b) {
+    return a.value.title > b.value.title ? -1 :
+      a.value.title < b.value.title ? 1 : 0;
+  });
+  o.spy(o, "value", o.response,
+        "allDocs (empty query in complex query)");
+  o.jio.allDocs({
+    "sort_on": [['title', 'descending']],
+    "select_list": ['title']
+  }, o.f);
   o.tick(o);
...
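For comparison outside the test harness, a sketch of the two allDocs call shapes exercised above. The option names are the ones visible in the diff; `jio` stands for a storage created with JIO.newJio as in these tests, and the callback body is elided:

  // Old shape: options nested under "query"/"filter" (removed by this commit).
  jio.allDocs({
    "query": {
      "query": '(year: >= "1980")',
      "filter": {
        "limit": [0, 5],
        "sort_on": [['year', 'descending']],
        "select_list": ['title', 'year']
      },
      "wildcard_character": '%'
    }
  }, function (err, response) { /* ... */ });

  // New shape: the same options passed flat, as the reworked storage expects.
  jio.allDocs({
    "query": '(year: >= "1980")',
    "limit": [0, 5],
    "sort_on": [['year', 'descending']],
    "select_list": ['title', 'year']
  }, function (err, response) { /* ... */ });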