move to clang-16 and clang-format-16
rtjohnso committed Dec 25, 2024
1 parent 0f73b69 commit ac172df
Showing 23 changed files with 88 additions and 85 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/run-tests.yml
@@ -12,7 +12,7 @@ jobs:
 - uses: actions/checkout@v4
 - uses: awalsh128/cache-apt-pkgs-action@latest
 with:
-packages: libxxhash-dev libaio-dev libconfig-dev gcc clang-13 clang-format-13
+packages: libxxhash-dev libaio-dev libconfig-dev gcc clang-16 clang-format-16
 - name: format-check
 run: ./format-check.sh
 
@@ -68,6 +68,6 @@ jobs:
 - uses: actions/checkout@v4
 - uses: awalsh128/cache-apt-pkgs-action@latest
 with:
-packages: libxxhash-dev libaio-dev libconfig-dev gcc clang-13 clang-format-13
+packages: libxxhash-dev libaio-dev libconfig-dev gcc clang-16 clang-format-16
 - name: make ${{ matrix.target }}
 run: make ${{ matrix.target }}

14 changes: 7 additions & 7 deletions Dockerfile.build-env
@@ -23,17 +23,17 @@ set -euo pipefail; \
 export DEBIAN_FRONTEND=noninteractive; \
 apt-get update -y && apt-get install -y software-properties-common wget'
 
-# Install llvm and clang v13 for Ubuntu 20.04 and make them the default
-# We rely on clang-format-13 for ./format-check.sh
+# Install llvm and clang v16 for Ubuntu 20.04 and make them the default
+# We rely on clang-format-16 for ./format-check.sh
 RUN /bin/bash -c ' \
 set -euo pipefail; \
 export DEBIAN_FRONTEND=noninteractive; \
 wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - \
-&& add-apt-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-13 main" \
-&& apt-get install -y clang-13 clang-format-13 lld-13 \
-&& update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1 \
-&& update-alternatives --install /usr/bin/lld lld /usr/bin/lld-13 1 \
-&& update-alternatives --install /usr/bin/ld.lld ld.lld /usr/bin/ld.lld-13 1'
+&& add-apt-repository "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-16 main" \
+&& apt-get install -y clang-16 clang-format-16 lld-16 \
+&& update-alternatives --install /usr/bin/clang clang /usr/bin/clang-16 1 \
+&& update-alternatives --install /usr/bin/lld lld /usr/bin/lld-16 1 \
+&& update-alternatives --install /usr/bin/ld.lld ld.lld /usr/bin/ld.lld-16 1'
 
 # Install remaining tools required for builds
 RUN /bin/bash -c ' \

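Since `update-alternatives` is what makes the unversioned `clang`, `lld`, and `ld.lld` names point at the new toolchain inside the build image, a quick post-build sanity check looks roughly like this (a hedged sketch, not part of the commit):

```shell
# Inside the rebuilt build-env image: confirm the defaults now resolve to v16.
$ clang --version                      # expect "clang version 16.x"
$ update-alternatives --display clang  # expect /usr/bin/clang-16 listed
$ update-alternatives --display ld.lld # expect /usr/bin/ld.lld-16 listed
```
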
16 changes: 3 additions & 13 deletions docs/build.md
@@ -7,25 +7,15 @@ To integrate SplinterDB into another application, see [Usage](usage.md).
 Builds are known to work on Ubuntu using recent versions of GCC and Clang.
 
 ### Tooling
-In CI, we test against GCC 9 and Clang 13.
+In CI, we test against GCC 13 and Clang 16.
 
-We use `clang-format-13` for code formatting.
-
-To install `clang-13` tools on Ubuntu Linux, do this:
-
-```shell
-$ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
-
-$ sudo add-apt-repository 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-13 main'
-
-$ sudo apt-get install -y clang-13 clang-format-13
-```
+We use `clang-format-16` for code formatting.
 
 ### Full build
 Here are the steps to do a full-build of the library, run smoke tests, and to install the shared libraries:
 
 ```shell
-$ export COMPILER=gcc # or clang-13
+$ export COMPILER=gcc # or clang
 $ sudo apt update -y
 $ sudo apt install -y libaio-dev libconfig-dev libxxhash-dev $COMPILER
 $ export CC=$COMPILER

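The step-by-step `clang-13` install instructions are dropped here without a `clang-16` replacement. For reference, the Dockerfile.build-env recipe above translates roughly to the following on Ubuntu 20.04 (a hedged sketch, not text added by this commit; newer Ubuntu releases may ship these packages without the extra repository):

```shell
# Sketch: install clang-16 tools on Ubuntu 20.04 (focal), mirroring Dockerfile.build-env.
$ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
$ sudo add-apt-repository 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-16 main'
$ sudo apt-get update -y
$ sudo apt-get install -y clang-16 clang-format-16
```
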
16 changes: 3 additions & 13 deletions docs/site/content/docs/build.md
@@ -7,25 +7,15 @@ To integrate SplinterDB into another application, see [Usage](usage.md).
 Builds are known to work on Ubuntu using recent versions of GCC and Clang.
 
 ### Tooling
-In CI, we test against GCC 9 and Clang 13.
+In CI, we test against GCC 13 and Clang 16.
 
-We use `clang-format-13` for code formatting.
-
-To install `clang-13` tools on Ubuntu Linux, do this:
-
-```shell
-$ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
-
-$ sudo add-apt-repository 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-13 main'
-
-$ sudo apt-get install -y clang-13 clang-format-13
-```
+We use `clang-format-16` for code formatting.
 
 ### Full build
 Here are the steps to do a full-build of the library, run smoke tests, and to install the shared libraries:
 
 ```shell
-$ export COMPILER=gcc # or clang-13
+$ export COMPILER=gcc # or clang
 $ sudo apt update -y
 $ sudo apt install -y libaio-dev libconfig-dev libxxhash-dev $COMPILER
 $ export CC=$COMPILER

16 changes: 3 additions & 13 deletions docs/site/content/docs/v0.0.1/build.md
@@ -7,25 +7,15 @@ To integrate SplinterDB into another application, see [Usage](usage.md).
 Builds are known to work on Ubuntu using recent versions of GCC and Clang.
 
 ### Tooling
-In CI, we test against GCC 9 and Clang 13.
+In CI, we test against GCC 13 and Clang 16.
 
-We use `clang-format-13` for code formatting.
-
-To install `clang-13` tools on Ubuntu Linux, do this:
-
-```shell
-$ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
-
-$ sudo add-apt-repository 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-13 main'
-
-$ sudo apt-get install -y clang-13 clang-format-13
-```
+We use `clang-format-16` for code formatting.
 
 ### Full build
 Here are the steps to do a full-build of the library, run smoke tests, and to install the shared libraries:
 
 ```shell
-$ export COMPILER=gcc # or clang-13
+$ export COMPILER=gcc # or clang
 $ sudo apt update -y
 $ sudo apt install -y libaio-dev libconfig-dev libxxhash-dev $COMPILER
 $ export CC=$COMPILER

6 changes: 4 additions & 2 deletions examples/splinterdb_wide_values_example.c
@@ -55,7 +55,8 @@ main()
 int nrows = 0;
 // Insert few values doubling value-size for each new key inserted
 for (int val_len = 16; val_len <= USER_MAX_VALUE_SIZE;
-val_len <<= 1, nrows++) {
+val_len <<= 1, nrows++)
+{
 snprintf(key_buf, sizeof(key_buf), "Key with val_len=%d", val_len);
 memset(val_buf, 'z', val_len);
 
@@ -83,7 +84,8 @@ main()
 // Lookup keys which have increasingly wider-values, using a small fixed size
 // output buffer. When necessary, memory will be allocated for wider values.
 for (int val_len = 16; val_len <= USER_MAX_VALUE_SIZE;
-val_len <<= 1, nrows++) {
+val_len <<= 1, nrows++)
+{
 
 char key_buf[USER_MAX_KEY_SIZE];
 snprintf(key_buf, sizeof(key_buf), "Key with val_len=%d", val_len);

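These hunks, like the src/*.c hunks below, are formatting-only: clang-format 16 appears to place the opening brace on its own line when the controlling `for`/`if`/`while` header spans multiple lines, whereas clang-format 13 kept it on the same line. A hedged way to preview the pinned tool's output for a single file, assuming the repository's `.clang-format` is found via `--style=file`:

```shell
# Sketch: print clang-format-16's output for one file and diff it against the
# checked-in version; an empty diff means the file already matches the new style.
$ clang-format-16 --style=file examples/splinterdb_wide_values_example.c | diff -u examples/splinterdb_wide_values_example.c -
```
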
2 changes: 1 addition & 1 deletion format-check.sh
@@ -9,7 +9,7 @@ set -eu -o pipefail
 
 # different tool versions yield different results
 # so we standardize on this version
-TOOL="clang-format-13"
+TOOL="clang-format-16"
 
 # Check if TOOL exists
 if ! command -v "$TOOL" &> /dev/null; then

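Because the script pins a single clang-format version, contributors need `clang-format-16` on their `PATH` before the check can run; with the packages installed as sketched earlier, a local run would look roughly like this (hedged example):

```shell
# Sketch: verify the pinned tool is present, then run the repository's format check.
$ command -v clang-format-16
$ ./format-check.sh
```
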
3 changes: 2 additions & 1 deletion src/btree.c
@@ -598,7 +598,8 @@ btree_create_leaf_incorporate_spec(const btree_config *cfg,
 return STATUS_NO_MEMORY;
 }
 if (btree_merge_tuples(
-cfg, tuple_key, oldmessage, &spec->msg.merged_message)) {
+cfg, tuple_key, oldmessage, &spec->msg.merged_message))
+{
 merge_accumulator_deinit(&spec->msg.merged_message);
 return STATUS_NO_MEMORY;
 } else {

3 changes: 2 additions & 1 deletion src/clockcache.c
@@ -2172,7 +2172,8 @@ clockcache_get_internal(clockcache *cc, // IN
 return TRUE;
 case GET_RC_SUCCESS:
 if (clockcache_get_entry(cc, entry_number)->page.disk_addr
-!= addr) {
+!= addr)
+{
 // this also means we raced with eviction and really lost
 clockcache_dec_ref(cc, entry_number, tid);
 return TRUE;

2 changes: 1 addition & 1 deletion src/clockcache.h
@@ -13,7 +13,7 @@
 #include "cache.h"
 #include "io.h"
 
-//#define ADDR_TRACING
+// #define ADDR_TRACING
 #define TRACE_ADDR (UINT64_MAX - 1)
 #define TRACE_ENTRY (UINT32_MAX - 1)

3 changes: 2 additions & 1 deletion src/merge.c
@@ -630,7 +630,8 @@ merge_iterator_set_direction(merge_iterator *merge_itor, bool32 forwards)
 iterator_next(merge_itor->ordered_iterators[i]->itor);
 }
 if (!forwards
-&& iterator_can_prev(merge_itor->ordered_iterators[i]->itor)) {
+&& iterator_can_prev(merge_itor->ordered_iterators[i]->itor))
+{
 iterator_prev(merge_itor->ordered_iterators[i]->itor);
 }
 }

3 changes: 2 additions & 1 deletion src/platform_linux/platform_inline.h
@@ -233,7 +233,8 @@ platform_getpid()
 
 static inline void
 platform_yield()
-{}
+{
+}
 
 // platform predicates
 static inline bool32

2 changes: 1 addition & 1 deletion src/platform_linux/platform_types.h
@@ -170,7 +170,7 @@ typedef struct {
 long min, max, total;
 unsigned long num; // no. of elements
 unsigned long count[];
-} * platform_histo_handle;
+} *platform_histo_handle;
 
 #define UNUSED_PARAM(_parm) _parm __attribute__((__unused__))
 #define UNUSED_TYPE(_parm) UNUSED_PARAM(_parm)

2 changes: 1 addition & 1 deletion src/platform_linux/poison.h
@@ -51,7 +51,7 @@
 #pragma GCC poison fputs
 
 #pragma GCC poison printf
-//#pragma GCC poison FILE
+// #pragma GCC poison FILE
 
 #pragma GCC poison pthread_t

15 changes: 10 additions & 5 deletions src/routing_filter.c
@@ -472,7 +472,8 @@ routing_filter_add(cache *cc,
 && routing_get_index(fp_arr[fp_no], index_remainder_and_value_size)
 == index_no;
 fp_no++)
-{}
+{
+}
 index_count[index_no] = fp_no - index_start;
 }
 
@@ -527,7 +528,8 @@ routing_filter_add(cache *cc,
 }
 uint32 old_fps_added = 0;
 for (uint32 index_off = 0; index_off < new_indices_per_old_index;
-index_off++) {
+index_off++)
+{
 uint32 *new_src_fp = &fp_arr[fp_no];
 uint32 index_no = old_index_no * new_indices_per_old_index + index_off;
 uint32 last_bucket = index_no * index_size;
@@ -539,7 +541,8 @@
 // last_bucket, end_bucket);
 uint32 last_fp_added = UINT32_MAX;
 while (new_fps_added < new_index_count
-|| old_fps_added < old_index_count) {
+|| old_fps_added < old_index_count)
+{
 uint32 fp;
 bool32 is_old = ((new_fps_added == new_index_count)
 || ((old_fps_added != old_index_count)
@@ -649,7 +652,8 @@ routing_filter_prefetch(cache *cc,
 uint64 index_no = 0;
 
 for (uint64 index_page_no = 0; index_page_no < num_index_pages;
-index_page_no++) {
+index_page_no++)
+{
 uint64 index_addr = filter->addr + (page_size * index_page_no);
 page_handle *index_page =
 cache_get(cc, index_addr, TRUE, PAGE_TYPE_FILTER);
@@ -752,7 +756,8 @@ routing_filter_estimate_unique_fp(cache *cc,
 remainder_and_value_size);
 uint32 last_fp = UINT32_MAX;
 for (uint32 bucket_off = 0; bucket_off < index_size;
-bucket_off++) {
+bucket_off++)
+{
 uint32 bucket = index_bucket_start + bucket_off;
 for (uint32 i = 0; i < count[bucket_off]; i++) {
 fp_arr[src_fp_no] |= bucket << remainder_and_value_size;

6 changes: 4 additions & 2 deletions src/task.c
@@ -666,7 +666,8 @@ task_enqueue(task_system *ts,
 new_task->enqueue_time = platform_get_timestamp();
 const threadid tid = platform_get_tid();
 if (group->current_waiting_tasks
-> group->stats[tid].max_outstanding_tasks) {
+> group->stats[tid].max_outstanding_tasks)
+{
 group->stats[tid].max_outstanding_tasks = group->current_waiting_tasks;
 }
 group->stats[tid].total_tasks_enqueued += 1;
@@ -992,7 +993,8 @@ task_group_print_stats(task_group *group, task_type type)
 global.max_runtime_func = group->stats[i].max_runtime_func;
 }
 if (group->stats[i].max_queue_wait_time_ns
-> global.max_queue_wait_time_ns) {
+> global.max_queue_wait_time_ns)
+{
 global.max_queue_wait_time_ns = group->stats[i].max_queue_wait_time_ns;
 }
 global.max_outstanding_tasks = MAX(global.max_outstanding_tasks,

24 changes: 16 additions & 8 deletions src/trunk.c
@@ -2634,7 +2634,8 @@ trunk_clear_bundle(trunk_handle *spl, trunk_node *node, uint16 bundle_no)
 
 // update the pivot start bundles
 for (uint16 pivot_no = 0; pivot_no < trunk_num_children(spl, node);
-pivot_no++) {
+pivot_no++)
+{
 trunk_pivot_data *pdata = trunk_get_pivot_data(spl, node, pivot_no);
 if (!trunk_bundle_valid(spl, node, pdata->start_bundle)) {
 pdata->start_bundle = trunk_start_bundle(spl, node);
@@ -2822,7 +2823,8 @@ trunk_process_generation_to_pos(trunk_handle *spl,
 {
 uint64 pos = 0;
 while ((pos != TRUNK_MAX_PIVOTS)
-&& (req->pivot_generation[pos] != generation)) {
+&& (req->pivot_generation[pos] != generation))
+{
 pos++;
 }
 return pos;
@@ -3128,7 +3130,8 @@ trunk_replace_bundle_branches(trunk_handle *spl,
 for (uint16 pivot_no = 0; pivot_no < num_children; pivot_no++) {
 trunk_pivot_data *pdata = trunk_get_pivot_data(spl, node, pivot_no);
 if (!trunk_branch_live_for_pivot(
-spl, node, bundle_start_branch, pivot_no)) {
+spl, node, bundle_start_branch, pivot_no))
+{
 pdata->start_branch =
 trunk_subtract_branch_number(spl, pdata->start_branch, branch_diff);
 debug_assert(trunk_branch_valid(spl, node, pdata->start_branch));
@@ -4056,7 +4059,8 @@ trunk_prepare_build_filter(trunk_handle *spl,
 trunk_pivot_data *pdata = trunk_get_pivot_data(spl, node, pivot_no);
 
 if (trunk_bundle_live_for_pivot(
-spl, node, compact_req->bundle_no, pivot_no)) {
+spl, node, compact_req->bundle_no, pivot_no))
+{
 uint64 pos = trunk_process_generation_to_pos(
 spl, compact_req, pdata->generation);
 platform_assert(pos != TRUNK_MAX_PIVOTS);
@@ -4165,7 +4169,8 @@ trunk_replace_routing_filter(trunk_handle *spl,
 uint64 pos =
 trunk_process_generation_to_pos(spl, compact_req, pdata->generation);
 if (!trunk_bundle_live_for_pivot(
-spl, node, compact_req->bundle_no, pivot_no)) {
+spl, node, compact_req->bundle_no, pivot_no))
+{
 if (pos != TRUNK_MAX_PIVOTS && filter_scratch->filter[pos].addr != 0) {
 trunk_dec_filter(spl, &filter_scratch->filter[pos]);
 ZERO_CONTENTS(&filter_scratch->filter[pos]);
@@ -4777,7 +4782,8 @@ trunk_flush_fullest(trunk_handle *spl, trunk_node *node)
 * children
 */
 for (uint16 pivot_no = 0; pivot_no < trunk_num_children(spl, node);
-pivot_no++) {
+pivot_no++)
+{
 trunk_pivot_data *pdata = trunk_get_pivot_data(spl, node, pivot_no);
 // if a pivot has too many branches, just flush it here
 if (trunk_pivot_needs_flush(spl, node, pdata)) {
@@ -5804,7 +5810,8 @@ trunk_split_leaf(trunk_handle *spl,
 platform_assert_status_ok(rc2);
 
 for (uint64 branch_offset = 0; branch_offset < num_branches;
-branch_offset++) {
+branch_offset++)
+{
 uint64 branch_no =
 trunk_add_branch_number(spl, start_branch, branch_offset);
 debug_assert(branch_no != trunk_end_branch(spl, leaf));
@@ -8655,7 +8662,8 @@ trunk_print_branches_and_bundles(platform_log_handle *log_handle,
 {
 // Generate marker line if current branch is a pivot's start branch
 for (uint16 pivot_no = 0; pivot_no < trunk_num_children(spl, node);
-pivot_no++) {
+pivot_no++)
+{
 if (branch_no == trunk_pivot_start_branch(spl, node, pivot_no)) {
 // clang-format off
 platform_log(log_handle, "| | -- pivot %2u -- | | | | |\n",
