Skip to content

Commit

Permalink
Merge branch 'develop-3.4' into mas-d31-mas.i1433-filterexpression
Browse files Browse the repository at this point in the history
  • Loading branch information
martinsumner committed Nov 18, 2024
2 parents bf4c345 + c642575 commit c340ac3
Show file tree
Hide file tree
Showing 7 changed files with 138 additions and 60 deletions.
4 changes: 2 additions & 2 deletions src/leveled_bookie.erl
Original file line number Diff line number Diff line change
Expand Up @@ -2514,13 +2514,13 @@ recalcfor_ledgercache(
not_present;
{LK, LV} ->
case leveled_codec:get_metadata(LV) of
MDO when MDO =/= null ->
MDO when is_tuple(MDO) ->
MDO
end
end,
UpdMetadata =
case leveled_codec:get_metadata(MetaValue) of
MDU when MDU =/= null ->
MDU when is_tuple(MDU) ->
MDU
end,
IdxSpecs =
Expand Down
29 changes: 15 additions & 14 deletions src/leveled_codec.erl
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,9 @@
-type segment_hash() ::
% hash of the key to an aae segment - to be used in ledger filters
{integer(), integer()}|no_lookup.
-type head_value() :: any().
-type metadata() ::
tuple()|null. % null for empty metadata
tuple()|null|head_value(). % null for empty metadata
-type last_moddate() ::
% modified date as determined by the object (not this store)
% if the object has siblings in the store will be the maximum of those
Expand Down Expand Up @@ -185,7 +186,8 @@
actual_regex/0,
value_fetcher/0,
proxy_object/0,
slimmed_key/0
slimmed_key/0,
head_value/0
]).


Expand Down Expand Up @@ -469,6 +471,8 @@ to_querykey(Bucket, Key, Tag, Field, Value) when Tag == ?IDX_TAG ->
-spec to_querykey(key()|null, key()|null, tag()) -> query_key().
%% @doc
%% Convert something into a ledger query key
to_querykey(Bucket, KeyOrKeyPair, Tag) ->
    %% Build a ledger query key. A {Key, SubKey} pair populates both the
    %% key and subkey positions; a plain key leaves the subkey as null.
    case KeyOrKeyPair of
        {Key, SubKey} ->
            {Tag, Bucket, Key, SubKey};
        Key ->
            {Tag, Bucket, Key, null}
    end.

Expand Down Expand Up @@ -820,19 +824,16 @@ gen_headspec(
gen_headspec({IdxOp, v1, Bucket, Key, SubKey, undefined, Value}, SQN, TTL).


-spec return_proxy
(leveled_head:headonly_tag(), leveled_head:object_metadata(), null, journal_ref())
-> leveled_head:object_metadata();
(leveled_head:object_tag(), leveled_head:object_metadata(), pid(), journal_ref())
-> proxy_objectbin().
-spec return_proxy(
leveled_head:object_tag(),
leveled_head:object_metadata(),
pid(),
journal_ref()) -> proxy_objectbin().
%% @doc
%% If the object has a value, return the metadata and a proxy through which
%% the applictaion or runner can access the value. If it is a ?HEAD_TAG
%% then it has no value, so just return the metadata
return_proxy(?HEAD_TAG, ObjectMetadata, _InkerClone, _JR) ->
% Object has no value - so proxy object makese no sense, just return the
% metadata as is
ObjectMetadata;
%% the application or runner can access the value.
%% This is only called if there is an object tag - i.e. ?RIAK_TAG//STD_TAG or
%% a user-defined tag that uses ObjMetadata in the ?STD_TAG format
return_proxy(Tag, ObjMetadata, InkerClone, JournalRef) ->
Size = leveled_head:get_size(Tag, ObjMetadata),
HeadBin = leveled_head:build_head(Tag, ObjMetadata),
Expand Down Expand Up @@ -913,7 +914,7 @@ get_size(PK, Value) ->

-spec get_keyandobjhash(tuple(), tuple()) -> tuple().
%% @doc
%% Return a tucple of {Bucket, Key, Hash} where hash is a hash of the object
%% Return a tuple of {Bucket, Key, Hash} where hash is a hash of the object
%% not the key (for example with Riak tagged objects this will be a hash of
%% the sorted vclock)
get_keyandobjhash(LK, Value) ->
Expand Down
73 changes: 36 additions & 37 deletions src/leveled_head.erl
Original file line number Diff line number Diff line change
Expand Up @@ -49,29 +49,31 @@
-type headonly_tag() :: ?HEAD_TAG.
% Tag assigned to head_only objects. Behaviour cannot be changed

-type riak_metadata() :: {binary()|delete,
% Sibling Metadata
binary()|null,
% Vclock Metadata
non_neg_integer()|null,
% Hash of vclock - non-exportable
non_neg_integer()
% Size in bytes of real object
}.
-type std_metadata() :: {non_neg_integer()|null,
% Hash of value
non_neg_integer(),
% Size in bytes of real object
list(tuple())|undefined
% User-define metadata
}.
-type head_metadata() :: {non_neg_integer()|null,
% Hash of value
non_neg_integer()
% Size in bytes of real object
}.

-type object_metadata() :: riak_metadata()|std_metadata()|head_metadata().
-type riak_metadata() ::
{
binary()|delete,
% Sibling Metadata
binary()|null,
% Vclock Metadata
non_neg_integer()|null,
% Hash of vclock - non-exportable
non_neg_integer()
% Size in bytes of real object
}.
-type std_metadata() ::
{
non_neg_integer()|null,
% Hash of value
non_neg_integer(),
% Size in bytes of real object
list(tuple())|undefined
% User-define metadata
}.
% std_metadata() must be outputted as the metadata format by any
% app-defined function
-type head_metadata() :: leveled_codec:head_value().

-type object_metadata() :: riak_metadata()|std_metadata().

-type appdefinable_function() ::
key_to_canonicalbinary | build_head | extract_metadata | diff_indexspecs.
Expand All @@ -80,12 +82,12 @@
-type appdefinable_keyfun() ::
fun((tuple()) -> binary()).
-type appdefinable_headfun() ::
fun((object_tag(), object_metadata()) -> head()).
fun((object_tag(), std_metadata()) -> head()).
-type appdefinable_metadatafun() ::
fun((leveled_codec:tag(), non_neg_integer(), binary()|delete) ->
{object_metadata(), list(erlang:timestamp())}).
{std_metadata(), list(erlang:timestamp())}).
-type appdefinable_indexspecsfun() ::
fun((object_tag(), object_metadata(), object_metadata()|not_present) ->
fun((object_tag(), std_metadata(), std_metadata()|not_present) ->
leveled_codec:index_specs()).
-type appdefinable_function_fun() ::
appdefinable_keyfun() | appdefinable_headfun() |
Expand All @@ -96,12 +98,7 @@
-type index_op() :: add | remove.
-type index_value() :: integer() | binary().

-type head() ::
binary()|tuple().
% TODO:
% This is currently not always a binary. Wish is to migrate this so that
% it is predictably a binary

-type head() :: binary()|tuple()|head_metadata().

-export_type([object_tag/0,
headonly_tag/0,
Expand Down Expand Up @@ -143,7 +140,9 @@ default_key_to_canonicalbinary(Key) ->
leveled_util:t2b(Key).


-spec build_head(object_tag()|headonly_tag(), object_metadata()) -> head().
-spec build_head
(object_tag(), object_metadata()) -> head();
(headonly_tag(), head_metadata()) -> head() .
%% @doc
%% Return the object metadata as a binary to be the "head" of the object
build_head(?HEAD_TAG, Value) ->
Expand Down Expand Up @@ -253,22 +252,22 @@ default_reload_strategy(Tag) ->
{Tag, retain}.

-spec get_size(
    object_tag(), object_metadata()) -> non_neg_integer().
%% @doc
%% Fetch the size from the metadata. Size is the last element of riak
%% metadata, and the second element of standard metadata. ?HEAD_TAG
%% objects carry no object_metadata(), so are excluded by the guard.
get_size(?RIAK_TAG, {_, _, _, Size}) ->
    Size;
get_size(Tag, {_, Size, _}) when Tag =/= ?HEAD_TAG ->
    Size.


-spec get_hash(
    object_tag(), object_metadata()) -> non_neg_integer()|null.
%% @doc
%% Fetch the hash from the metadata. The hash is the third element of
%% riak metadata (hash of the vclock), and the first element of standard
%% metadata (hash of the value). ?HEAD_TAG objects carry no
%% object_metadata(), so are excluded by the guard.
get_hash(?RIAK_TAG, {_, _, Hash, _}) ->
    Hash;
get_hash(Tag, {Hash, _, _}) when Tag =/= ?HEAD_TAG ->
    Hash.

-spec standard_hash(any()) -> non_neg_integer().
Expand Down
14 changes: 10 additions & 4 deletions src/leveled_log.erl
Original file line number Diff line number Diff line change
Expand Up @@ -527,13 +527,19 @@ log_wrongkey_test() ->
error,
{badkey, wrong0001},
log(wrong0001, [],[warning, error], ?LOGBASE, backend)
),
).

logtimer_wrongkey_test() ->
    %% Confirm log_timer/6 raises {badkey, Key} for an unknown log reference.
    ST = os:timestamp(),
    % Note -
    % An issue with cover means issues with ?assertException, where the
    % function being tested is split across lines, the closing bracket on the
    % next line is not recognised as being covered. We want 100% coverage, so
    % need to write this on one line.
    ?assertException(
        error,
        {badkey, wrong0001},
        log_timer(wrong0001, [], ST, [warning, error], ?LOGBASE, backend)
    ).

shouldilog_test() ->
Expand Down
4 changes: 3 additions & 1 deletion src/leveled_runner.erl
Original file line number Diff line number Diff line change
Expand Up @@ -635,7 +635,9 @@ accumulate_objects(FoldObjectsFun, InkerClone, Tag, DeferredFetch) ->
end,
JK = {leveled_codec:to_objectkey(B, K, Tag), SQN},
case DeferredFetch of
{true, JournalCheck} when MD =/= null ->
{true, false} when Tag == ?HEAD_TAG ->
FoldObjectsFun(B, K, MD, Acc);
{true, JournalCheck} when is_tuple(MD) ->
ProxyObj =
leveled_codec:return_proxy(Tag, MD, InkerClone, JK),
case {JournalCheck, InkerClone} of
Expand Down
2 changes: 1 addition & 1 deletion src/leveled_sst.erl
Original file line number Diff line number Diff line change
Expand Up @@ -3377,7 +3377,7 @@ generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
Chunk = crypto:strong_rand_bytes(64),
MV = leveled_codec:convert_to_ledgerv(LK, Seqn, Chunk, 64, infinity),
MD = element(4, MV),
MD =/= null orelse error(bad_type),
is_tuple(MD) orelse error(bad_type),
?assertMatch(undefined, element(3, MD)),
MD0 = [{magic_md, [<<0:32/integer>>, base64:encode(Chunk)]}],
MV0 = setelement(4, MV, setelement(3, MD, MD0)),
Expand Down
72 changes: 71 additions & 1 deletion test/end_to_end/tictac_SUITE.erl
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@
-include("leveled.hrl").
-export([all/0, init_per_suite/1, end_per_suite/1]).
-export([
multiput_subkeys/1,
many_put_compare/1,
index_compare/1,
basic_headonly/1,
tuplebuckets_headonly/1
]).

all() -> [
multiput_subkeys,
many_put_compare,
index_compare,
basic_headonly,
Expand All @@ -25,8 +27,76 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
testutil:end_per_suite(Config).

many_put_compare(_Config) ->

multiput_subkeys(_Config) ->
    %% Run the subkey scenario for each supported value representation:
    %% a tuple value, the null value, and a plain binary value.
    %% The comprehension evaluates left-to-right, preserving call order;
    %% lists:last/1 keeps the final call's result as the return value.
    lists:last(
        [multiput_subkeys_byvalue(V) ||
            V <- [{null, 0}, null, <<"binaryValue">>]]
    ).

multiput_subkeys_byvalue(V) ->
    %% Start a head_only (no_lookup) store, load five keys each carrying
    %% SubKeyCount subkeys holding the value V, then confirm that
    %% subkey-aware range folds return the expected counts.
    RootPath = testutil:reset_filestructure("subkeyTest"),
    StartOpts =
        [{root_path, RootPath},
         {max_journalsize, 10000000},
         {max_pencillercachesize, 12000},
         {head_only, no_lookup},
         {sync_strategy, testutil:sync_strategy()}],
    {ok, Bookie} = leveled_bookie:book_start(StartOpts),
    SubKeyCount = 200000,

    B = {<<"MultiBucketType">>, <<"MultiBucket">>},
    ObjSpecLGen =
        fun(K) ->
            [{add, v1, B, K, <<I:32/integer>>, [os:timestamp()], V} ||
                I <- lists:seq(1, SubKeyCount)]
        end,

    %% Load keys 1..5 in order, each with the full set of subkeys.
    lists:foreach(
        fun(KeyInt) ->
            load_objectspecs(ObjSpecLGen(<<KeyInt:32/integer>>), 32, Bookie)
        end,
        lists:seq(1, 5)
    ),

    FoldFun =
        %% Accumulate {Key, SubKey} pairs; matching against the bound B
        %% crashes (case_clause) on any unexpected bucket, as the original
        %% guard-based clause did.
        fun(FoldBucket, {Key, SubKey}, _Value, Acc) ->
            case FoldBucket of
                B ->
                    [{Key, SubKey}|Acc]
            end
        end,
    QueryFun =
        fun(KeyRange) ->
            Range = {range, B, KeyRange},
            {async, Runner} =
                leveled_bookie:book_headfold(
                    Bookie, ?HEAD_TAG, Range, {FoldFun, []}, false, true, false
                ),
            Found = length(Runner()),
            io:format("query result for range ~p is ~w~n", [Range, Found]),
            Found
        end,

    %% KR1 spans all of key 1; KR2 spans keys 3 and 4 entirely (plus the
    %% empty-subkey start of key 5); KR3 spans the tail of key 1 from
    %% subkey 10 plus the head of key 2 up to subkey 19.
    KR1 = {{<<1:32/integer>>, <<>>}, {<<2:32/integer>>, <<>>}},
    KR2 = {{<<3:32/integer>>, <<>>}, {<<5:32/integer>>, <<>>}},
    KR3 =
        {
            {<<1:32/integer>>, <<10:32/integer>>},
            {<<2:32/integer>>, <<19:32/integer>>}
        },
    true = SubKeyCount == QueryFun(KR1),
    true = (SubKeyCount * 2) == QueryFun(KR2),
    true = (SubKeyCount + 10) == QueryFun(KR3),
    leveled_bookie:book_destroy(Bookie).

many_put_compare(_Config) ->
TreeSize = small,
SegmentCount = 256 * 256,
% Test requires multiple different databases, so want to mount them all
Expand Down

0 comments on commit c340ac3

Please sign in to comment.