Improve perf_SUITE test (#445)
* Improve perf_SUITE test

The update test is refactored so as not to generate a large KV list which dominates the memory utilisation (see the key-derivation sketch after this list).

The update and the get tests are changed to do a head before each operation - which emulates how this will work in Riak (see the head-before-get sketch after this list).

* Revert default setting change

* Don't pre-calculate key list

For fetches - this reduces the memory required for the test process, not the database, and the consequent distortion to the measured results (see the on-demand key sketch after this list).

* Tidy ++ in tests

Removes some rogue results from the profile (see the iolist sketch after this list).

* Update testutil.erl

* Test fixes

* Tidy generate_chunk for profiling

* Revert "Tidy generate_chunk for profiling"

This reverts commit 1f6cff4.

* Resize profile test
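
The key derivation behind the update-test refactor is sketched below. This is an illustration only: `key_for_count/1` is a hypothetical wrapper around the derivation used by `rotation_with_prefetch/5` in the perf_SUITE diff further down.

```erlang
%% Each key is derived from the loop counter, so no pre-generated KV
%% list needs to be held by the test process while objects are rotated.
key_for_count(Count) ->
    H = erlang:phash2(Count),
    H1 = H band 127,
    H2 = (H bsr 7) band 127,
    H3 = (H bsr 14) band 127,
    H4 = (H bsr 21) band 63,
    <<H1:8/integer, H2:8/integer, H3:8/integer, H4:8/integer>>.
```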
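
The head-before-get pattern is sketched below. This is a minimal sketch: `fetch_with_head/3` is a hypothetical wrapper, while `testutil:book_riakhead/3` and `testutil:book_riakget/3` are the helpers the suite already uses.

```erlang
%% Each read issues a HEAD (metadata only) before the GET, emulating
%% Riak checking object metadata before fetching the full value.
fetch_with_head(Bookie, Bucket, Key) ->
    {ok, _Head} = testutil:book_riakhead(Bookie, Bucket, Key),
    {ok, _Obj} = testutil:book_riakget(Bookie, Bucket, Key).
```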
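
The on-demand key generation for fetches is sketched below. Assumptions: `random_fetch_loop/3` and its `FetchFun` argument are illustrative only; the key selection mirrors the `KeyFun` in the perf_SUITE diff.

```erlang
%% Keys are computed inside the timed loop rather than materialised as
%% a list of Fetches keys, so the test process holds no large key list
%% while the database is being measured. 4 in 5 fetches target the
%% first 20% of the keyspace, 1 in 5 the remaining 80%.
random_fetch_loop(FetchFun, ObjCount, Fetches) ->
    Twenty = ObjCount div 5,
    KeyFun =
        fun(I) ->
            case I rem 5 of
                1 ->
                    testutil:fixed_bin_key(
                        Twenty + leveled_rand:uniform(ObjCount - Twenty));
                _ ->
                    testutil:fixed_bin_key(leveled_rand:uniform(Twenty))
            end
        end,
    lists:foreach(fun(I) -> FetchFun(KeyFun(I)) end, lists:seq(1, Fetches)).
```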
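
The `++` tidy is sketched below. The function names are illustrative; the point is building an iolist and converting once, rather than concatenating lists before `list_to_binary/1`.

```erlang
%% Old form: two list appends allocate intermediate lists before the
%% binary conversion.
old_bin_index(ListID) ->
    list_to_binary("binary" ++ integer_to_list(ListID) ++ "_bin").

%% New form: build an iolist and convert to a binary in one pass.
new_bin_index(ListID) ->
    iolist_to_binary(["binary", integer_to_list(ListID), "_bin"]).
```
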
martinsumner authored Sep 2, 2024
1 parent 7b5b18e commit acf3059
Showing 5 changed files with 181 additions and 81 deletions.
8 changes: 1 addition & 7 deletions src/leveled_ebloom.erl
@@ -302,13 +302,7 @@ split_builder_speed_tester() ->
Timings =
lists:map(
fun(HashList) ->
SlotCount =
case length(HashList) of
0 ->
0;
L ->
min(128, max(2, (L - 1) div 512))
end,
SlotCount = min(128, max(2, (length(HashList) - 1) div 512)),
InitTuple = list_to_tuple(lists:duplicate(SlotCount, [])),
{MTC, SlotHashes} =
timer:tc(
8 changes: 6 additions & 2 deletions src/leveled_head.erl
@@ -32,8 +32,12 @@
]).

%% Exported for testing purposes
-export([riak_metadata_to_binary/2,
riak_extract_metadata/2]).
-export(
[
riak_metadata_to_binary/2,
riak_extract_metadata/2,
get_indexes_from_siblingmetabin/2
]).


-define(MAGIC, 53). % riak_kv -> riak_object
2 changes: 1 addition & 1 deletion src/leveled_pmanifest.erl
@@ -74,7 +74,7 @@
% At o(10) trillion keys behaviour may become increasingly
% difficult to predict.

-if(OTP_RELEASE >= 25).
-if(?OTP_RELEASE >= 25).
-if(length(?LEVEL_SCALEFACTOR) /= ?MAX_LEVELS).
-error("length ?LEVEL_SCALEFACTOR differs from ?MAX_LEVELS").
-endif.
155 changes: 117 additions & 38 deletions test/end_to_end/perf_SUITE.erl
@@ -9,6 +9,7 @@
-define(PEOPLE_INDEX, <<"people_bin">>).
-define(MINI_QUERY_DIVISOR, 8).
-define(RGEX_QUERY_DIVISOR, 32).
-define(PUT_PAUSE, 40).

-ifndef(performance).
-define(performance, riak_ctperf).
@@ -92,13 +93,13 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
fun(ListID) ->
fun() ->
RandInt = leveled_rand:uniform(IndexCount - 1),
IntIndex = "integer" ++ integer_to_list(ListID) ++ "_int",
BinIndex = "binary" ++ integer_to_list(ListID) ++ "_bin",
[{add, list_to_binary(IntIndex), RandInt},
IntIndex = ["integer", integer_to_list(ListID), "_int"],
BinIndex = ["binary", integer_to_list(ListID), "_bin"],
[{add, iolist_to_binary(IntIndex), RandInt},
{add, ?PEOPLE_INDEX, list_to_binary(random_people_index())},
{add, list_to_binary(IntIndex), RandInt + 1},
{add, list_to_binary(BinIndex), <<RandInt:32/integer>>},
{add, list_to_binary(BinIndex), <<(RandInt + 1):32/integer>>}]
{add, iolist_to_binary(IntIndex), RandInt + 1},
{add, iolist_to_binary(BinIndex), <<RandInt:32/integer>>},
{add, iolist_to_binary(BinIndex), <<(RandInt + 1):32/integer>>}]
end
end,

@@ -140,7 +141,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
GetMemoryTracker = memory_tracking(get, 1000),
GetAccountant = accounting(get, 3000, ProfileList),
TotalGetTime =
random_fetches(get, Bookie1, Bucket, KeyCount, GetFetches),
random_fetches(riakget, Bookie1, Bucket, KeyCount div 2, GetFetches),
ok = stop_accounting(GetAccountant),
{MT2, MP2, MB2} = stop_tracker(GetMemoryTracker),

@@ -235,7 +236,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
UpdateMemoryTracker = memory_tracking(update, 1000),
UpdateAccountant = accounting(update, 1000, ProfileList),
TotalUpdateTime =
rotate_chunk(Bookie1, <<"UpdBucket">>, KeyCount div 50, ObjSize),
rotate_chunk(Bookie1, <<"UpdBucket">>, KeyCount div 100, ObjSize, 2),
ok = stop_accounting(UpdateAccountant),
{MT6, MP6, MB6} = stop_tracker(UpdateMemoryTracker),

@@ -360,29 +361,103 @@ profile_app(Pids, ProfiledFun, P) ->

eprof:stop_profiling(),
eprof:log(atom_to_list(P) ++ ".log"),
eprof:analyze(total, [{filter, [{time, 150000}]}]),
eprof:analyze(total, [{filter, [{time, 160000}]}]),
eprof:stop(),
{ok, Analysis} = file:read_file(atom_to_list(P) ++ ".log"),
io:format(user, "~n~s~n", [Analysis])
.

rotate_chunk(Bookie, Bucket, KeyCount, ObjSize) ->
rotate_chunk(Bookie, Bucket, KeyCount, ObjSize, IdxCount) ->
ct:log(
?INFO,
"Rotating an ObjList ~w - "
"time includes object generation",
[KeyCount]),
V1 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
V2 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
V3 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
{TC, ok} =
timer:tc(
fun() ->
testutil:rotation_withnocheck(
Bookie, Bucket, KeyCount, V1, V2, V3)
rotation_withnocheck(
Bookie, Bucket, KeyCount, ObjSize, IdxCount
)
end),
TC div 1000.


rotation_with_prefetch(_Book, _B, 0, _Value, _IdxCnt) ->
garbage_collect(),
ok;
rotation_with_prefetch(Book, B, Count, Value, IdxCnt) ->
H = erlang:phash2(Count),
H1 = H band 127,
H2 = (H bsr 7) band 127,
H3 = (H bsr 14) band 127,
H4 = (H bsr 21) band 63,
K = <<H1:8/integer, H2:8/integer, H3:8/integer, H4:8/integer>>,
IndexGen = testutil:get_randomindexes_generator(IdxCnt),
RemoveSpc =
case testutil:book_riakhead(Book, B, K) of
not_found ->
[];
{ok, Head} ->
{{SibMetaBin, _Vclock, _Hash, size}, _LMS}
= leveled_head:riak_extract_metadata(Head, size),
lists:map(
fun({Fld, Trm}) -> {add, Fld, Trm} end,
leveled_head:get_indexes_from_siblingmetabin(
SibMetaBin, []
)
)
end,
{O, DeltaSpecs} =
testutil:set_object(B, K, Value, IndexGen, RemoveSpc),
case testutil:book_riakput(Book, O, DeltaSpecs) of
ok ->
ok;
pause ->
timer:sleep(?PUT_PAUSE),
pause
end,
rotation_with_prefetch(Book, B, Count - 1, Value, IdxCnt).


rotation_withnocheck(Book, B, NumberOfObjects, ObjSize, IdxCnt) ->
rotation_with_prefetch(
Book,
B,
NumberOfObjects,
base64:encode(leveled_rand:rand_bytes(ObjSize)),
IdxCnt
),
rotation_with_prefetch(
Book,
B,
NumberOfObjects,
base64:encode(leveled_rand:rand_bytes(ObjSize)),
IdxCnt
),
rotation_with_prefetch(
Book,
B,
NumberOfObjects,
base64:encode(leveled_rand:rand_bytes(ObjSize)),
IdxCnt
),
rotation_with_prefetch(
Book,
B,
NumberOfObjects,
base64:encode(leveled_rand:rand_bytes(ObjSize)),
IdxCnt
),
rotation_with_prefetch(
Book,
B,
NumberOfObjects,
base64:encode(leveled_rand:rand_bytes(ObjSize)),
IdxCnt
),
ok.

generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
testutil:generate_objects(
CountPerList,
@@ -430,7 +505,7 @@ time_load_chunk(
ok ->
ThisProcess! {TC, 0};
pause ->
timer:sleep(40),
timer:sleep(?PUT_PAUSE),
ThisProcess ! {TC + 40000, 1}
end
end
@@ -487,34 +562,38 @@ counter(Bookie, estimate) ->


random_fetches(FetchType, Bookie, Bucket, ObjCount, Fetches) ->
KeysToFetch =
lists:map(
fun(I) ->
Twenty = ObjCount div 5,
case I rem 5 of
1 ->
testutil:fixed_bin_key(
Twenty + leveled_rand:uniform(ObjCount - Twenty));
_ ->
testutil:fixed_bin_key(leveled_rand:uniform(Twenty))
end
end,
lists:seq(1, Fetches)
),
Twenty = ObjCount div 5,
KeyFun =
fun(I) ->
case I rem 5 of
1 ->
testutil:fixed_bin_key(
Twenty + leveled_rand:uniform(ObjCount - Twenty));
_ ->
testutil:fixed_bin_key(leveled_rand:uniform(Twenty))
end
end,
{TC, ok} =
timer:tc(
fun() ->
lists:foreach(
fun(K) ->
fun(I) ->
K = KeyFun(I),
{ok, _} =
case FetchType of
riakget ->
{ok, _} =
testutil:book_riakhead(
Bookie, Bucket, K
),
testutil:book_riakget(Bookie, Bucket, K);
get ->
testutil:book_riakget(Bookie, Bucket, K);
head ->
testutil:book_riakhead(Bookie, Bucket, K)
end
end,
KeysToFetch
lists:seq(1, Fetches)
)
end
),
@@ -530,7 +609,7 @@ random_queries(Bookie, Bucket, IDs, IdxCnt, MaxRange, IndexesReturned) ->
fun() ->
ID = leveled_rand:uniform(IDs),
BinIndex =
list_to_binary("binary" ++ integer_to_list(ID) ++ "_bin"),
iolist_to_binary(["binary", integer_to_list(ID), "_bin"]),
Twenty = IdxCnt div 5,
RI = leveled_rand:uniform(MaxRange),
[Start, End] =
@@ -615,21 +694,21 @@ profile_fun(
fun() ->
random_queries(
Bookie, Bucket, 10, IndexCount, QuerySize,
IndexesReturned div ?MINI_QUERY_DIVISOR)
(IndexesReturned * 2) div ?MINI_QUERY_DIVISOR)
end;
profile_fun(
{query, QuerySize},
{Bookie, Bucket, _KeyCount, _ObjSize, IndexCount, IndexesReturned}) ->
fun() ->
random_queries(
Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned)
Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned * 2)
end;
profile_fun(
regex_query,
{Bookie, Bucket, _KeyCount, _ObjSize, _IndexCount, IndexesReturned}) ->
fun() ->
random_people_queries(
Bookie, Bucket, IndexesReturned div ?RGEX_QUERY_DIVISOR)
Bookie, Bucket, (IndexesReturned * 2) div ?RGEX_QUERY_DIVISOR)
end;
profile_fun(
{head, HeadFetches},
@@ -655,7 +734,7 @@ profile_fun(
update,
{Bookie, _Bucket, KeyCount, ObjSize, _IndexCount, _IndexesReturned}) ->
fun() ->
rotate_chunk(Bookie, <<"ProfileB">>, KeyCount div 50, ObjSize)
rotate_chunk(Bookie, <<"ProfileB">>, KeyCount div 100, ObjSize, 2)
end;
profile_fun(
CounterFold,
Expand All @@ -665,7 +744,7 @@ profile_fun(
full ->
20;
estimate ->
40;
50;
guess ->
100
end,