Merge pull request CESNET#48 from CESNET/devel
Bump version to 2.2.0
Lukas955 authored Mar 14, 2021
2 parents 5515554 + 1749984 commit 0744bcf
Showing 123 changed files with 12,801 additions and 825 deletions.
16 changes: 12 additions & 4 deletions .github/workflows/main.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
fail-fast: false
matrix:
image: ['ubuntu:18.04', 'ubuntu:19.04', 'ubuntu:20.04', 'debian:stretch', 'debian:buster', 'debian:bullseye', 'centos:7', 'centos:8', 'fedora:29', 'fedora:30', 'fedora:31']
image: ['ubuntu:18.04', 'ubuntu:20.04', 'debian:stretch', 'debian:buster', 'debian:bullseye', 'centos:7', 'centos:8']

name: Build on ${{ matrix.image }}
container: ${{ matrix.image }}
@@ -29,18 +29,26 @@ jobs:
apt-get update
apt-get -y install git gcc g++ cmake make libxml2-dev liblz4-dev libzstd-dev
apt-get -y install python3-docutils zlib1g-dev pkg-config
apt-get -y install librdkafka-dev
env:
DEBIAN_FRONTEND: noninteractive
- name: Enable additional repositories (CentOS 8)
if: startsWith(matrix.image, 'centos:8')
run: |
dnf -y install 'dnf-command(config-manager)'
dnf config-manager --set-enabled appstream powertools
- name: Install dependencies for libfds and IPFIXcol2 (CentOS)
if: startsWith(matrix.image, 'centos')
run: |
yum -y install epel-release
yum -y install git gcc gcc-c++ cmake make libxml2-devel lz4-devel libzstd-devel
yum -y install zlib-devel pkgconfig
yum -y install zlib-devel pkgconfig librdkafka-devel
yum -y install python3-docutils || yum -y install python-docutils
- name: Install depedencies for libfds and IPFIXcol2 (Fedora)
- name: Install dependencies for libfds and IPFIXcol2 (Fedora)
if: startsWith(matrix.image, 'fedora')
run: |
dnf -y install git gcc gcc-c++ cmake make libxml2-devel lz4-devel libzstd-devel
dnf -y install python3-docutils zlib-devel pkgconfig
dnf -y install python3-docutils zlib-devel pkgconfig librdkafka-devel
# Build libfds library ------------------------------------------------------------------
# Note: Master against master branch. Otherwise against debug branch.
21 changes: 14 additions & 7 deletions .github/workflows/packages.yml
@@ -16,15 +16,15 @@ jobs:
strategy:
fail-fast: false
matrix:
image: ['ubuntu:18.04', 'ubuntu:19.04', 'ubuntu:20.04', 'debian:stretch', 'debian:buster', 'debian:bullseye']
image: ['ubuntu:18.04', 'ubuntu:20.04', 'debian:stretch', 'debian:buster', 'debian:bullseye']

name: Build DEBs on ${{ matrix.image }}
container: ${{ matrix.image }}

steps:
- uses: actions/checkout@v1
- name: Define global variables
run: echo "::set-output name=zip_file::libfds-${IMAGE//:/}-$GITHUB_SHA.zip"
run: echo "::set-output name=zip_file::ipfixcol2-${IMAGE//:/}-$GITHUB_SHA.zip"
shell: bash
env:
IMAGE: ${{ matrix.image }}
@@ -35,8 +35,10 @@ jobs:
run: |
apt-get update
apt-get -y install git gcc g++ cmake make libxml2-dev liblz4-dev libzstd-dev
apt-get -y install python3-docutils zlib1g-dev pkg-config
apt-get -y install python3-docutils zlib1g-dev pkg-config librdkafka-dev
apt-get -y install debhelper devscripts build-essential fakeroot zip
env:
DEBIAN_FRONTEND: noninteractive

# Build LIBFDS DEB package ---------------------------------------------------------------
- name: Checkout libfds library - master branch
@@ -83,7 +85,7 @@ jobs:
strategy:
fail-fast: false
matrix:
image: ['centos:7', 'centos:8', 'fedora:29', 'fedora:30', 'fedora:31']
image: ['centos:7', 'centos:8']

name: Build RPMs on ${{ matrix.image }}
container: ${{ matrix.image }}
@@ -92,25 +94,30 @@
- uses: actions/checkout@v1
- name: Prepare environment and variables
run: |
echo "::set-output name=zip_file::libfds-${IMAGE//:/}-$GITHUB_SHA.zip"
echo "::set-output name=zip_file::ipfixcol2-${IMAGE//:/}-$GITHUB_SHA.zip"
mkdir -p build/libfds_repo
env:
IMAGE: ${{ matrix.image }}
id: vars

# Dependencies ---------------------------------------------------------------------------
- name: Enable additional repositories (CentOS 8)
if: startsWith(matrix.image, 'centos:8')
run: |
dnf -y install 'dnf-command(config-manager)'
dnf config-manager --set-enabled appstream powertools
- name: Install dependencies for libfds and IPFIXcol2 (CentOS)
if: startsWith(matrix.image, 'centos')
run: |
yum -y install epel-release
yum -y install git gcc gcc-c++ cmake make libxml2-devel lz4-devel libzstd-devel
yum -y install zlib-devel pkgconfig rpm-build
yum -y install zlib-devel pkgconfig rpm-build librdkafka-devel
yum -y install python3-docutils || yum -y install python-docutils
- name: Install depedencies for libfds and IPFIXcol2 (Fedora)
if: startsWith(matrix.image, 'fedora')
run: |
dnf -y install git gcc gcc-c++ cmake make libxml2-devel lz4-devel libzstd-devel
dnf -y install python3-docutils zlib-devel pkgconfig rpm-build
dnf -y install python3-docutils zlib-devel pkgconfig rpm-build librdkafka-devel
# Build LIBFDS RPM package ---------------------------------------------------------------
- name: Checkout libfds library - master branch
22 changes: 21 additions & 1 deletion CMakeLists.txt
@@ -14,7 +14,7 @@ endif()

# Versions and other informations
set(IPFIXCOL_VERSION_MAJOR 2)
set(IPFIXCOL_VERSION_MINOR 1)
set(IPFIXCOL_VERSION_MINOR 2)
set(IPFIXCOL_VERSION_PATCH 0)
set(IPFIXCOL_VERSION
${IPFIXCOL_VERSION_MAJOR}.${IPFIXCOL_VERSION_MINOR}.${IPFIXCOL_VERSION_PATCH})
@@ -38,6 +38,8 @@ if (NOT COMPILER_SUPPORT_GNUXX11)
message(FATAL_ERROR "Compiler does NOT support C++11 with GNU extension")
endif()



# ------------------------------------------------------------------------------
# Set default build type if not specified by user
set(DEFAULT_BUILD_TYPE "Release")
@@ -76,6 +78,23 @@ if (ENABLE_TESTS AND ENABLE_TESTS_COVERAGE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage -fprofile-arcs -ftest-coverage")
endif()

## -----------------------------------------------------------------------------
# Find libfds
find_package(LibFds 0.2.0 REQUIRED)

# Find rst2man
if (ENABLE_DOC_MANPAGE)
find_package(Rst2Man)
if (NOT RST2MAN_FOUND)
message(FATAL_ERROR "rst2man is not available")
endif()
endif()

# Find pthreads
set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
set(THREADS_PREFER_PTHREAD_FLAG TRUE)
find_package(Threads REQUIRED)

# ------------------------------------------------------------------------------
# Project components
add_subdirectory(include)
@@ -86,6 +105,7 @@ add_subdirectory(pkg)
if (ENABLE_TESTS)
enable_testing()
add_subdirectory(tests/unit)
add_subdirectory(tests/modules)
endif()

# ------------------------------------------------------------------------------
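The CMakeLists.txt changes above only locate the new build dependencies (libfds, rst2man, pthreads); a target still has to consume them. A minimal sketch of that step follows, assuming the project's FindLibFds module exports LIBFDS_INCLUDE_DIRS and LIBFDS_LIBRARIES (the target name, source file, and those variable names are placeholders, not taken from this commit; Threads::Threads is CMake's standard imported target).

# Sketch only: target name and LIBFDS_* variable names are assumptions.
set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
set(THREADS_PREFER_PTHREAD_FLAG TRUE)
find_package(Threads REQUIRED)
find_package(LibFds 0.2.0 REQUIRED)

add_executable(collector_example main.c)
target_include_directories(collector_example PRIVATE ${LIBFDS_INCLUDE_DIRS})
target_link_libraries(collector_example PRIVATE ${LIBFDS_LIBRARIES} Threads::Threads)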
57 changes: 57 additions & 0 deletions CMakeModules/FindLibRDKafka.cmake
@@ -0,0 +1,57 @@
# LIBRDKAFKA_FOUND - System has librdkafka
# LIBRDKAFKA_INCLUDE_DIRS - The librdkafka include directories
# LIBRDKAFKA_LIBRARIES - The libraries needed to use librdkafka
# LIBRDKAFKA_DEFINITIONS - Compiler switches required for using librdkafka

# use pkg-config to get the directories and then use these values
# in the find_path() and find_library() calls
find_package(PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_check_modules(PC_RDKAFKA QUIET rdkafka)
set(LIBRDKAFKA_DEFINITIONS ${PC_RDKAFKA_CFLAGS_OTHER})
endif()

find_path(
KAFKA_INCLUDE_DIR librdkafka/rdkafka.h
HINTS ${PC_RDKAFKA_INCLUDEDIR} ${PC_RDKAFKA_INCLUDE_DIRS}
PATH_SUFFIXES include
)

find_library(
KAFKA_LIBRARY NAMES rdkafka librdkafka
HINTS ${PC_RDKAFKA_LIBDIR} ${PC_RDKAFKA_LIBRARY_DIRS}
PATH_SUFFIXES lib lib64
)

if (PC_RDKAFKA_VERSION)
# Version extracted from pkg-config
set(KAFKA_VERSION_STRING ${PC_RDKAFKA_VERSION})
elseif(KAFKA_INCLUDE_DIR AND KAFKA_LIBRARY)
# Try to get the version of the installed library
try_run(
KAFKA_RES_RUN KAFKA_RES_COMP
${CMAKE_CURRENT_BINARY_DIR}/try_run/kafka_version_test/
${PROJECT_SOURCE_DIR}/CMakeModules/try_run/kafka_version.c
CMAKE_FLAGS
-DLINK_LIBRARIES=${KAFKA_LIBRARY}
-DINCLUDE_DIRECTORIES=${KAFKA_INCLUDE_DIR}
RUN_OUTPUT_VARIABLE KAFKA_VERSION_VAR
)

if (KAFKA_RES_COMP AND KAFKA_RES_RUN EQUAL 0)
# Successfully compiled and executed with return code 0
set(KAFKA_VERSION_STRING ${KAFKA_VERSION_VAR})
endif()
endif()

# handle the QUIETLY and REQUIRED arguments and set LIBRDKAFKA_FOUND to TRUE
# if all listed variables are TRUE
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibRDKafka
REQUIRED_VARS KAFKA_LIBRARY KAFKA_INCLUDE_DIR
VERSION_VAR KAFKA_VERSION_STRING
)

set(LIBRDKAFKA_LIBRARIES ${KAFKA_LIBRARY})
set(LIBRDKAFKA_INCLUDE_DIRS ${KAFKA_INCLUDE_DIR})
mark_as_advanced(KAFKA_INCLUDE_DIR KAFKA_LIBRARY)
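For context, the module above is meant to be consumed through find_package(); a rough usage sketch is shown below. The minimum version, plugin target name, and source file are illustrative assumptions, not values taken from this commit.

# Sketch only: version constraint, target name and source file are placeholders.
list(APPEND CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/CMakeModules")
find_package(LibRDKafka 0.9.3 REQUIRED)

add_library(json_kafka_output MODULE json_kafka.c)
target_include_directories(json_kafka_output PRIVATE ${LIBRDKAFKA_INCLUDE_DIRS})
target_compile_options(json_kafka_output PRIVATE ${LIBRDKAFKA_DEFINITIONS})
target_link_libraries(json_kafka_output PRIVATE ${LIBRDKAFKA_LIBRARIES})

When pkg-config does not report a version, the module falls back to compiling and running the small kafka_version.c helper (shown next) to recover the installed library version.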
14 changes: 14 additions & 0 deletions CMakeModules/try_run/kafka_version.c
@@ -0,0 +1,14 @@
#include <librdkafka/rdkafka.h>
#include <stdio.h>

int
main(int argc, char *argv[])
{
const char *ver_str = rd_kafka_version_str();
if (!ver_str) {
return 1;
}

printf("%s", ver_str);
return 0;
}
29 changes: 20 additions & 9 deletions README.rst
@@ -29,24 +29,28 @@ No problem, pick any combination of plugins.
Available plugins
-----------------

**Input plugins** - receive IPFIX data. Each can be configured to to listen on a specific
**Input plugins** - receive NetFlow/IPFIX data. Each can be configured to listen on a specific
network interface and a port. Multiple instances of these plugins can run concurrently.

- `UDP <src/plugins/input/udp>`_ - receives NetFlow v5/v9 and IPFIX over UDP
- `TCP <src/plugins/input/tcp>`_ - receives IPFIX over TCP
- `UDP <src/plugins/input/udp>`_ - receive NetFlow v5/v9 and IPFIX over UDP
- `TCP <src/plugins/input/tcp>`_ - receive IPFIX over TCP
- `FDS File <src/plugins/input/fds>`_ - read flow data from FDS File (efficient long-term storage)
- `IPFIX File <src/plugins/input/ipfix>`_ - read flow data from IPFIX File

**Intermediate plugins** - modify, enrich and filter flow records.

- `anonymization <src/plugins/intermediate/anonymization/>`_ - anonymize IP addresses
- `Anonymization <src/plugins/intermediate/anonymization/>`_ - anonymize IP addresses
(in flow records) with Crypto-PAn algorithm

**Output plugins** - store or forward your flows.

- `FDS file <src/plugins/output/fds>`_ - store all flows in FDS file format (efficient long-term storage)
- `FDS File <src/plugins/output/fds>`_ - store all flows in FDS file format (efficient long-term storage)
- `Forwarder <src/plugins/output/forwarder>`_ - forward flows as IPFIX to one or more subcollectors
- `IPFIX File <src/plugins/output/ipfix>`_ - store all flows in IPFIX File format
- `JSON <src/plugins/output/json>`_ - convert flow records to JSON and send/store them
- `JSON-Kafka <src/plugins/output/json-kafka>`_ - convert flow records to JSON and send them to Apache Kafka
- `Viewer <src/plugins/output/viewer>`_ - convert IPFIX into plain text and print
it on standard output
- `IPFIX file <src/plugins/output/ipfix>`_ - store all flows in IPFIX File format
- `Time Check <src/plugins/output/timecheck>`_ - flow timestamp check
- `Dummy <src/plugins/output/dummy>`_ - simple output module example
- `lnfstore <extra_plugins/output/lnfstore>`_ (*) - store all flows in nfdump compatible
@@ -79,24 +83,31 @@ Second, install build dependencies of the collector

.. code-block::
yum install gcc gcc-c++ cmake make python3-docutils zlib-devel
yum install gcc gcc-c++ cmake make python3-docutils zlib-devel librdkafka-devel
# Optionally: doxygen pkgconfig
* Note: latest systems (e.g. Fedora) use ``dnf`` instead of ``yum``.
* Note: latest systems (e.g. Fedora/CentOS 8) use ``dnf`` instead of ``yum``.
* Note: package ``python3-docutils`` may be also named as ``python-docutils`` or ``python2-docutils``
* Note: package ``pkgconfig`` may be also named as ``pkg-config``
* Note: CentOS 8 requires additional system repositories (``appstream`` and ``powertools``) to be enabled:

.. code-block::
dnf config-manager --set-enabled appstream powertools
**Debian/Ubuntu:**

.. code-block::
apt-get install gcc g++ cmake make python3-docutils zlib1g-dev
apt-get install gcc g++ cmake make python3-docutils zlib1g-dev librdkafka-dev
# Optionally: doxygen pkg-config
Finally, build and install the collector:

.. code-block:: bash
$ git clone https://github.com/CESNET/ipfixcol2.git
$ cd ipfixcol2
$ mkdir build && cd build && cmake ..
$ make
# make install
3 changes: 2 additions & 1 deletion doc/data/configs/tcp2unirec.xml
@@ -1,5 +1,5 @@
<!--
Receive flow data over TCP, convert them into UniRec format and send via
Receive flow data over TCP, convert them into UniRec format and send via
TCP TRAP communication interface (port 8000).
-->
<ipfixcol2>
@@ -23,6 +23,7 @@
<params>
<!-- UniRec template -->
<uniRecFormat>TIME_FIRST,TIME_LAST,SRC_IP,DST_IP,PROTOCOL,?SRC_PORT,?DST_PORT,?TCP_FLAGS,PACKETS,BYTES</uniRecFormat>
<splitBiflow>true</splitBiflow>
<!-- TRAP interface configuration -->
<trapIfcCommon>
<timeout>HALF_WAIT</timeout>
58 changes: 58 additions & 0 deletions doc/data/configs/udp2json-kafka.xml
@@ -0,0 +1,58 @@
<!--
Receive flow data over UDP, convert them into JSON and send them
to Apache Kafka
-->
<ipfixcol2>
<!-- Input plugins -->
<inputPlugins>
<input>
<name>UDP collector</name>
<plugin>udp</plugin>
<params>
<!-- Listen on port 4739 -->
<localPort>4739</localPort>
<!-- Bind to all local addresses -->
<localIPAddress></localIPAddress>
</params>
</input>
</inputPlugins>

<!-- Output plugins -->
<outputPlugins>
<output>
<name>JSON output</name>
<plugin>json-kafka</plugin>
<params>
<!-- JSON format parameters -->
<tcpFlags>formatted</tcpFlags>
<timestamp>formatted</timestamp>
<protocol>formatted</protocol>
<ignoreUnknown>true</ignoreUnknown>
<ignoreOptions>true</ignoreOptions>
<nonPrintableChar>true</nonPrintableChar>
<octetArrayAsUint>true</octetArrayAsUint>
<numericNames>false</numericNames>
<splitBiflow>false</splitBiflow>
<detailedInfo>false</detailedInfo>
<templateInfo>false</templateInfo>

<!-- Output methods -->
<outputs>
<kafka>
<name>Send to Kafka</name>
<brokers>127.0.0.1</brokers>
<topic>ipfix</topic>
<blocking>false</blocking>
<partition>unassigned</partition>

<!-- Zero or more additional properties -->
<property>
<key>compression.codec</key>
<value>lz4</value>
</property>
</kafka>
</outputs>
</params>
</output>
</outputPlugins>
</ipfixcol2>
2 changes: 2 additions & 0 deletions doc/sphinx/configuration.rst
@@ -169,6 +169,8 @@ of IPFIXcol. Always keep in mind that you should modify a configuration to fit y

:`udp2json <../data/configs/udp2json.xml>`_:
Receive flow data over UDP, convert them into JSON and provide them as a server on local port.
:`udp2json-kafka <../data/configs/udp2json-kafka.xml>`_:
Receive flow data over UDP, convert them into JSON and send them to Apache Kafka.
:`tcp2anon2json <../data/configs/tcp2anon2json.xml>`_:
Receive flow data over TCP, anonymize them and store in JSON format on a local drive.
:`tcpUdp2lnf <../data/configs/tcpUdp2lnf.xml>`_: