[tarantool-patches] Re: [PATCH v6 1/3] box: refactor key_def_set_compare_func routine
Kirill Shcherbatov
kshcherbatov at tarantool.org
Fri Mar 15 13:20:11 MSK 2019
Refactor the key_def_set_compare_func routine to make it easier to
read, maintain, and extend. This is necessary due to the fact
that in a series of subsequent patches it will be significantly
expanded.
Needed for #3961
---
src/box/tuple_compare.cc | 195 +++++++++++++++++++--------------------
1 file changed, 93 insertions(+), 102 deletions(-)
diff --git a/src/box/tuple_compare.cc b/src/box/tuple_compare.cc
index dfe30b190..7fe1766a8 100644
--- a/src/box/tuple_compare.cc
+++ b/src/box/tuple_compare.cc
@@ -986,7 +986,7 @@ struct comparator_signature {
/**
* field1 no, field1 type, field2 no, field2 type, ...
*/
-static const comparator_signature cmp_arr[] = {
+static const comparator_signature precompiled_cmp_arr[] = {
COMPARATOR(0, FIELD_TYPE_UNSIGNED)
COMPARATOR(0, FIELD_TYPE_STRING)
COMPARATOR(0, FIELD_TYPE_UNSIGNED, 1, FIELD_TYPE_UNSIGNED)
@@ -1005,55 +1005,6 @@ static const comparator_signature cmp_arr[] = {
#undef COMPARATOR
-static const tuple_compare_t compare_slowpath_funcs[] = {
- tuple_compare_slowpath<false, false, false>,
- tuple_compare_slowpath<true, false, false>,
- tuple_compare_slowpath<false, true, false>,
- tuple_compare_slowpath<true, true, false>,
- tuple_compare_slowpath<false, false, true>,
- tuple_compare_slowpath<true, false, true>,
- tuple_compare_slowpath<false, true, true>,
- tuple_compare_slowpath<true, true, true>
-};
-
-static tuple_compare_t
-tuple_compare_create(const struct key_def *def)
-{
- int cmp_func_idx = (def->is_nullable ? 1 : 0) +
- 2 * (def->has_optional_parts ? 1 : 0) +
- 4 * (def->has_json_paths ? 1 : 0);
- if (def->is_nullable) {
- if (key_def_is_sequential(def)) {
- if (def->has_optional_parts)
- return tuple_compare_sequential<true, true>;
- else
- return tuple_compare_sequential<true, false>;
- } else {
- return compare_slowpath_funcs[cmp_func_idx];
- }
- }
- assert(! def->has_optional_parts);
- if (!key_def_has_collation(def) && !def->has_json_paths) {
- /* Precalculated comparators don't use collation */
- for (uint32_t k = 0;
- k < sizeof(cmp_arr) / sizeof(cmp_arr[0]); k++) {
- uint32_t i = 0;
- for (; i < def->part_count; i++)
- if (def->parts[i].fieldno !=
- cmp_arr[k].p[i * 2] ||
- def->parts[i].type !=
- cmp_arr[k].p[i * 2 + 1])
- break;
- if (i == def->part_count &&
- cmp_arr[k].p[i * 2] == UINT32_MAX)
- return cmp_arr[k].f;
- }
- }
- return key_def_is_sequential(def) ?
- tuple_compare_sequential<false, false> :
- compare_slowpath_funcs[cmp_func_idx];
-}
-
/* }}} tuple_compare */
/* {{{ tuple_compare_with_key */
@@ -1215,7 +1166,7 @@ struct comparator_with_key_signature
#define KEY_COMPARATOR(...) \
{ TupleCompareWithKey<0, __VA_ARGS__>::compare, { __VA_ARGS__ } },
-static const comparator_with_key_signature cmp_wk_arr[] = {
+static const comparator_with_key_signature precompiled_cmp_wk_arr[] = {
KEY_COMPARATOR(0, FIELD_TYPE_UNSIGNED, 1, FIELD_TYPE_UNSIGNED, 2, FIELD_TYPE_UNSIGNED)
KEY_COMPARATOR(0, FIELD_TYPE_STRING , 1, FIELD_TYPE_UNSIGNED, 2, FIELD_TYPE_UNSIGNED)
KEY_COMPARATOR(0, FIELD_TYPE_UNSIGNED, 1, FIELD_TYPE_STRING , 2, FIELD_TYPE_UNSIGNED)
@@ -1231,68 +1182,108 @@ static const comparator_with_key_signature cmp_wk_arr[] = {
KEY_COMPARATOR(1, FIELD_TYPE_STRING , 2, FIELD_TYPE_STRING)
};
-#undef KEY_COMPARATOR
-
-static const tuple_compare_with_key_t compare_with_key_slowpath_funcs[] = {
- tuple_compare_with_key_slowpath<false, false, false>,
- tuple_compare_with_key_slowpath<true, false, false>,
- tuple_compare_with_key_slowpath<false, true, false>,
- tuple_compare_with_key_slowpath<true, true, false>,
- tuple_compare_with_key_slowpath<false, false, true>,
- tuple_compare_with_key_slowpath<true, false, true>,
- tuple_compare_with_key_slowpath<false, true, true>,
- tuple_compare_with_key_slowpath<true, true, true>
+#undef KEY_COMPARATOR
+
+#define COMPARE_SLOWPATH(...) \
+ {tuple_compare_slowpath<__VA_ARGS__>, \
+ tuple_compare_with_key_slowpath<__VA_ARGS__>, \
+ __VA_ARGS__, false}
+
+#define COMPARE_SEQUENTIAL(...) \
+ {tuple_compare_sequential<__VA_ARGS__>, \
+ tuple_compare_with_key_sequential<__VA_ARGS__>, \
+ __VA_ARGS__, false, true}
+
+static struct {
+ tuple_compare_t tuple_compare;
+ tuple_compare_with_key_t tuple_compare_with_key;
+ bool is_nullable;
+ bool has_optional_parts;
+ bool has_json_paths;
+ bool is_sequential;
+} cmp_arr[] = {
+ COMPARE_SLOWPATH(false, false, false),
+ COMPARE_SLOWPATH(true, false, false),
+ COMPARE_SLOWPATH(false, true, false),
+ COMPARE_SLOWPATH(true, true, false),
+ COMPARE_SLOWPATH(false, false, true),
+ COMPARE_SLOWPATH(true, false, true),
+ COMPARE_SLOWPATH(false, true, true),
+ COMPARE_SLOWPATH(true, true, true),
+ COMPARE_SEQUENTIAL(false, false),
+ COMPARE_SEQUENTIAL(true, false),
+ COMPARE_SEQUENTIAL(false, true),
+ COMPARE_SEQUENTIAL(true, true),
};
-static tuple_compare_with_key_t
-tuple_compare_with_key_create(const struct key_def *def)
+#undef COMPARE_SLOWPATH
+#undef COMPARE_SEQUENTIAL
+
+/* }}} tuple_compare_with_key */
+
+void
+key_def_set_compare_func(struct key_def *def)
{
- int cmp_func_idx = (def->is_nullable ? 1 : 0) +
- 2 * (def->has_optional_parts ? 1 : 0) +
- 4 * (def->has_json_paths ? 1 : 0);
- if (def->is_nullable) {
- if (key_def_is_sequential(def)) {
- if (def->has_optional_parts) {
- return tuple_compare_with_key_sequential<true,
- true>;
- } else {
- return tuple_compare_with_key_sequential<true,
- false>;
+ def->tuple_compare = NULL;
+ def->tuple_compare_with_key = NULL;
+ if (!def->is_nullable && !key_def_has_collation(def) &&
+ !def->has_json_paths) {
+ /* Precalculated comparators don't use collation */
+ for (uint32_t k = 0; k < sizeof(precompiled_cmp_arr) /
+ sizeof(precompiled_cmp_arr[0]); k++) {
+ uint32_t i = 0;
+ for (; i < def->part_count; i++) {
+ if (def->parts[i].fieldno !=
+ precompiled_cmp_arr[k].p[i * 2] ||
+ def->parts[i].type !=
+ precompiled_cmp_arr[k].p[i * 2 + 1])
+ break;
+ }
+ if (i == def->part_count &&
+ precompiled_cmp_arr[k].p[i * 2] == UINT32_MAX) {
+ def->tuple_compare = precompiled_cmp_arr[k].f;
+ break;
}
- } else {
- return compare_with_key_slowpath_funcs[cmp_func_idx];
}
- }
- assert(! def->has_optional_parts);
- if (!key_def_has_collation(def) && !def->has_json_paths) {
- /* Precalculated comparators don't use collation */
- for (uint32_t k = 0;
- k < sizeof(cmp_wk_arr) / sizeof(cmp_wk_arr[0]);
- k++) {
-
+ for (uint32_t k = 0; k < sizeof(precompiled_cmp_wk_arr) /
+ sizeof(precompiled_cmp_wk_arr[0]); k++) {
uint32_t i = 0;
for (; i < def->part_count; i++) {
if (def->parts[i].fieldno !=
- cmp_wk_arr[k].p[i * 2] ||
- def->parts[i].type !=
- cmp_wk_arr[k].p[i * 2 + 1]) {
+ precompiled_cmp_wk_arr[k].p[i * 2] ||
+ def->parts[i].type !=
+ precompiled_cmp_wk_arr[k].p[i * 2 + 1])
break;
+ }
+ if (i == def->part_count) {
+ def->tuple_compare_with_key =
+ precompiled_cmp_wk_arr[k].f;
+ break;
+ }
+ }
+ }
+ if (def->tuple_compare == NULL || def->tuple_compare_with_key == NULL) {
+ for (uint32_t k = 0; k < sizeof(cmp_arr) /
+ sizeof(cmp_arr[0]); k++) {
+ if (def->is_nullable == cmp_arr[k].is_nullable &&
+ def->has_optional_parts ==
+ cmp_arr[k].has_optional_parts &&
+ def->has_json_paths == cmp_arr[k].has_json_paths &&
+ key_def_is_sequential(def) ==
+ cmp_arr[k].is_sequential) {
+ if (def->tuple_compare == NULL) {
+ def->tuple_compare =
+ cmp_arr[k].tuple_compare;
}
+ if (def->tuple_compare_with_key == NULL) {
+ def->tuple_compare_with_key =
+ cmp_arr[k].
+ tuple_compare_with_key;
+ }
+ break;
}
- if (i == def->part_count)
- return cmp_wk_arr[k].f;
}
}
- return key_def_is_sequential(def) ?
- tuple_compare_with_key_sequential<false, false> :
- compare_with_key_slowpath_funcs[cmp_func_idx];
-}
-
-/* }}} tuple_compare_with_key */
-
-void
-key_def_set_compare_func(struct key_def *def)
-{
- def->tuple_compare = tuple_compare_create(def);
- def->tuple_compare_with_key = tuple_compare_with_key_create(def);
+ assert(def->tuple_compare != NULL &&
+ def->tuple_compare_with_key != NULL);
}
--
2.21.0
More information about the Tarantool-patches
mailing list