diff --git a/.codeclimate.yml b/.codeclimate.yml
index 0e443ca..563da6b 100644
--- a/.codeclimate.yml
+++ b/.codeclimate.yml
@@ -1,10 +1,10 @@
engines:
eslint:
enabled: true
- channel: "eslint-8"
+ channel: 'eslint-9'
config:
- config: ".eslintrc.yaml"
+ config: 'eslint.config.mjs'
ratings:
- paths:
- - "**.js"
+ paths:
+ - '**.js'
diff --git a/.eslintrc.yaml b/.eslintrc.yaml
deleted file mode 100644
index fe947ea..0000000
--- a/.eslintrc.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-env:
- node: true
- es6: true
- mocha: true
- es2020: true
-
-plugins:
- - haraka
-
-extends:
- - eslint:recommended
- - plugin:haraka/recommended
-
-rules:
- indent: [2, 2, {"SwitchCase": 1}]
-
-root: true
-
-globals:
- OK: true
- CONT: true
- DENY: true
- DENYSOFT: true
- DENYDISCONNECT: true
- DENYSOFTDISCONNECT: true
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index afafec5..bd8fb43 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,10 +1,12 @@
Fixes #
Changes proposed in this pull request:
--
--
+
+-
+-
Checklist:
+
- [ ] docs updated
- [ ] tests updated
- [ ] Changes.md updated
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 0449e4a..d450132 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -2,9 +2,9 @@
version: 2
updates:
- - package-ecosystem: "npm"
- directory: "/"
+ - package-ecosystem: 'npm'
+ directory: '/'
schedule:
- interval: "weekly"
+ interval: 'weekly'
allow:
- dependency-type: production
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5360933..0004535 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,12 +1,11 @@
name: CI
-on: [ push, pull_request ]
+on: [push, pull_request]
env:
CI: true
jobs:
-
lint:
uses: haraka/.github/.github/workflows/lint.yml@master
@@ -14,28 +13,10 @@ jobs:
# uses: haraka/.github/.github/workflows/coverage.yml@master
# secrets: inherit
- test:
- needs: [ lint, get-lts ]
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [ ubuntu-latest, windows-latest ]
- node-version: ${{ fromJson(needs.get-lts.outputs.active) }}
- fail-fast: false
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-node@v3
- name: Node ${{ matrix.node-version }} on ${{ matrix.os }}
- with:
- node-version: ${{ matrix.node-version }}
- - run: npm install
- - run: npm test
+ ubuntu:
+ needs: [lint]
+ uses: haraka/.github/.github/workflows/ubuntu.yml@master
- get-lts:
- runs-on: ubuntu-latest
- steps:
- - id: get
- uses: msimerson/node-lts-versions@v1
- outputs:
- active: ${{ steps.get.outputs.active }}
- lts: ${{ steps.get.outputs.lts }}
+ windows:
+ needs: [lint]
+ uses: haraka/.github/.github/workflows/windows.yml@master
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 383aca2..816e8c3 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,10 +1,10 @@
-name: "CodeQL"
+name: 'CodeQL'
on:
push:
- branches: [ master ]
+ branches: [master]
pull_request:
- branches: [ master ]
+ branches: [master]
schedule:
- cron: '18 7 * * 4'
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 42a9bb9..e81c15f 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -4,6 +4,8 @@ on:
push:
branches:
- master
+ paths:
+ - package.json
env:
CI: true
@@ -11,4 +13,4 @@ env:
jobs:
publish:
uses: haraka/.github/.github/workflows/publish.yml@master
- secrets: inherit
\ No newline at end of file
+ secrets: inherit
diff --git a/.npmignore b/.npmignore
deleted file mode 100644
index 3e8e260..0000000
--- a/.npmignore
+++ /dev/null
@@ -1,58 +0,0 @@
-# Logs
-logs
-*.log
-npm-debug.log*
-
-# Runtime data
-pids
-*.pid
-*.seed
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-
-# nyc test coverage
-.nyc_output
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# node-waf configuration
-.lock-wscript
-
-# Compiled binary addons (http://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directories
-node_modules
-jspm_packages
-
-# Optional npm cache directory
-.npm
-
-# Optional REPL history
-.node_repl_history
-
-package-lock.json
-bower_components
-# Optional npm cache directory
-.npmrc
-.idea
-.DS_Store
-haraka-update.sh
-
-.github
-.release
-.codeclimate.yml
-.editorconfig
-.gitignore
-.gitmodules
-.lgtm.yml
-appveyor.yml
-codecov.yml
-.travis.yml
-.eslintrc.yaml
-.eslintrc.json
diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 0000000..8ded5e0
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,2 @@
+singleQuote: true
+semi: false
diff --git a/.release b/.release
index 0890e94..0bf2a09 160000
--- a/.release
+++ b/.release
@@ -1 +1 @@
-Subproject commit 0890e945e4e061c96c7b2ab45017525904c17728
+Subproject commit 0bf2a098d4792848c2103dfce0f911e00a14709e
diff --git a/Changes.md b/CHANGELOG.md
similarity index 65%
rename from Changes.md
rename to CHANGELOG.md
index ad4c3bd..d24d7a5 100644
--- a/Changes.md
+++ b/CHANGELOG.md
@@ -1,40 +1,40 @@
-
### Unreleased
+### [1.0.9] - 2025-01-08
-### [1.0.8] - 2023-05-25
+- doc: mv Changes -> CHANGELOG.md, add CONTRIBUTORS
+- ci: update to point to shared configs
+- style: automated code formatting with prettier
+- lint: remove duplicate / stale rules from .eslintrc
+- dep: eslint-plugin-haraka -> @haraka/eslint-config
+- populate [files] in package.json. Delete .npmignore.
-#### Changed
+### [1.0.8] - 2023-05-25
- doc(README) Update config file name #9
-
### 1.0.7 - 2023-01-05
- handle Spamhaus DQS (#5): add a dqs_key config option and a [dbl.dq.spamhaus.net] zone, disabled by default
-
### 1.0.6 - 2022-11-28
- test: increase timeout for DNSBL test
-
### 1.0.5 - 2022-08-29
- fix #2 change Spamhaus defaults to not assume errors as positives
- warn instead of debug when result do not validate
-
### [1.0.4] - 2022-07-23
- updated package.json
-
### 1.0.3 - 2022-07-23
- Import from Haraka
-
[1.0.4]: https://github.com/haraka/haraka-plugin-uribl/releases/tag/1.0.4
[1.0.6]: https://github.com/haraka/haraka-plugin-uribl/releases/tag/1.0.6
[1.0.8]: https://github.com/haraka/haraka-plugin-uribl/releases/tag/1.0.8
+[1.0.9]: https://github.com/haraka/haraka-plugin-uribl/releases/tag/1.0.9
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
new file mode 100644
index 0000000..dd7c63c
--- /dev/null
+++ b/CONTRIBUTORS.md
@@ -0,0 +1,9 @@
+# Contributors
+
+This handcrafted artisanal software is brought to you by:
+
+| msimerson (21) | DoobleD (1) | lnedry (1) |
+| :---: | :---: | :---: |
+
+This file is generated by [.release](https://github.com/msimerson/.release).
+Contribute to this project to get your GitHub profile included here.
diff --git a/README.md b/README.md
index 1cec476..77587ca 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,5 @@
[![CI Test Status][ci-img]][ci-url]
[![Code Climate][clim-img]][clim-url]
-[![NPM][npm-img]][npm-url]
# haraka-plugin-uribl
@@ -8,8 +7,7 @@ This plugin extracts URIs and feeds them to RHS based blacklists such as [DBL][1
This plugin will discard any domain name that does not have a valid TLD or any IP address within RFC1918, 127/8 or 169.254/16 (APIPA) and will convert any URI ending in in-addr.arpa into an IP address lookup.
-Configuration
--------------
+## Configuration
This plugin reads configuration from uribl.ini.
@@ -17,15 +15,15 @@ The main section defines global settings for all lists and the blacklists zones
The main section can contain the following options:
-* timeout
+- timeout
Default: 30
- The total timeout in seconds for each group of lookups. Any group of
+ The total timeout in seconds for each group of lookups. Any group of
lookups that takes longer than this will be aborted and the session
will continue.
-* max\_uris\_per\_list
+- max_uris_per_list
Default: 20
@@ -35,79 +33,73 @@ List sections should be named as the zone of the blacklist and can contain the f
At least one of the following must be set for any queries to be run for the blacklist.
-* rdns = 1 | true | yes | on | enabled
+- rdns = 1 | true | yes | on | enabled
Check any rDNS names against the list.
-* helo = 1 | true | yes | on | enabled
+- helo = 1 | true | yes | on | enabled
Check the EHLO/HELO argument against the list.
-* envfrom = 1 | true | yes | on | enabled
+- envfrom = 1 | true | yes | on | enabled
Check the MAIL FROM domain against the list.
-* from = 1 | true | yes | on | enabled
+- from = 1 | true | yes | on | enabled
Check the domain portion of the From: header against the list.
-* replyto = 1 | true | yes | on | enabled
+- replyto = 1 | true | yes | on | enabled
Check the domain portion of the Reply-To: header against the list.
-* msgid = 1 | true | yes | on | enabled
+- msgid = 1 | true | yes | on | enabled
Check the RHS of the Message-Id: header against the list.
-* body = 1 | true | yes | on | enabled
+- body = 1 | true | yes | on | enabled
Check any URIs found within the body of the message against the list.
The following are optional for each list:
-* custom\_msg
+- custom_msg
A custom rejection message that will be returned to the SMTP client if the list returns a positive result. If found within the string {uri} will be replaced by the URI value looked up and {zone} will be replaced by the blacklist zone name.
-* validate
+- validate
- A regular expression that will be tested against the first A record returned by the list. If it does not evaluate to true then the positive result will be discarded. Example: ^(?!127\.0\.1\.255)127\. would check that the IP address returned start with 127. and is not 127.0.1.255
+ A regular expression that will be tested against the first A record returned by the list. If it does not evaluate to true then the positive result will be discarded. Example: ^(?!127\.0\.1\.255)127\. would check that the IP address returned starts with 127. and is not 127.0.1.255
-* bitmask
+- bitmask
- This is optionally used for lists such as [SURBL][3] and [URIBL][4] that return bitmask values in the last octet of the returned IP address to combine multiple lists into a single zone. Using this you may specify which lists within the zone you want use.
+ This is optionally used for lists such as [SURBL][3] and [URIBL][4] that return bitmask values in the last octet of the returned IP address to combine multiple lists into a single zone. Using this you may specify which lists within the zone you want to use.
-* no\_ip\_lookups = 1 | true | yes | on | enabled
+- no_ip_lookups = 1 | true | yes | on | enabled
Specifies that no IP addresses should ever be check against this list. This is required for dbl.spamhaus.org.
-* strip\_to\_domain= 1 | true | yes | on | enabled
+- strip_to_domain= 1 | true | yes | on | enabled
- Specifies that the list requires hostnames be stripped down to the domain boundaries prior to querying the list. This is required for the [SURBL][3] and [URIBL][4] lists.
+ Specifies that the list requires hostnames be stripped down to the domain boundaries prior to querying the list. This is required for the [SURBL][3] and [URIBL][4] lists.
-Spamhaus DQS
-------------
+## Spamhaus DQS
-* dqs_key
+- dqs_key
DQS key for Spamhaus's DQS mirrors.
-Other files
------------
+## Other files
-* data.uribl.excludes
+- data.uribl.excludes
This contains a list of domains that should never be looked up in any blacklist as they are known good and will never be listed. This helps to keep useless queries to a minimum.
-
[1]: http://www.spamhaus.org/dbl
[2]: http://spameatingmonkey.com/lists.html#SEM-FRESH
[3]: http://www.surbl.org/
[4]: http://www.uribl.com/
-
[ci-img]: https://github.com/haraka/haraka-plugin-uribl/actions/workflows/ci.yml/badge.svg
[ci-url]: https://github.com/haraka/haraka-plugin-uribl/actions/workflows/ci.yml
[clim-img]: https://codeclimate.com/github/haraka/haraka-plugin-uribl/badges/gpa.svg
[clim-url]: https://codeclimate.com/github/haraka/haraka-plugin-uribl
-[npm-img]: https://nodei.co/npm/haraka-plugin-uribl.png
-[npm-url]: https://www.npmjs.com/package/haraka-plugin-uribl
diff --git a/eslint.config.mjs b/eslint.config.mjs
new file mode 100644
index 0000000..cd75fea
--- /dev/null
+++ b/eslint.config.mjs
@@ -0,0 +1,25 @@
+import globals from 'globals'
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+import js from '@eslint/js'
+import { FlatCompat } from '@eslint/eslintrc'
+
+const __filename = fileURLToPath(import.meta.url)
+const __dirname = path.dirname(__filename)
+const compat = new FlatCompat({
+ baseDirectory: __dirname,
+ recommendedConfig: js.configs.recommended,
+ allConfig: js.configs.all,
+})
+
+export default [
+ ...compat.extends('@haraka'),
+ {
+ languageOptions: {
+ globals: {
+ ...globals.node,
+ ...globals.mocha,
+ },
+ },
+ },
+]
diff --git a/index.js b/index.js
index d146038..4363f6c 100644
--- a/index.js
+++ b/index.js
@@ -1,44 +1,45 @@
// Look up URLs in SURBL
-const url = require('url');
-const dns = require('dns');
-const net = require('net');
-const tlds = require('haraka-tld');
+const url = require('url')
+const dns = require('dns')
+const net = require('net')
+const tlds = require('haraka-tld')
-const net_utils = require('haraka-net-utils');
-const utils = require('haraka-utils');
+const net_utils = require('haraka-net-utils')
+const utils = require('haraka-utils')
// Default regexps to extract the URIs from the message
-const numeric_ip = /\w{3,16}:\/+(\S+@)?(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)/gi;
-let schemeless = /(?:%(?:25)?(?:2F|3D|40))?((?:www\.)?[a-zA-Z0-9][a-zA-Z0-9\-.]{0,250}\.(?:aero|arpa|asia|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|xxx|[a-zA-Z]{2}))(?!\w)/gi;
-let schemed = /(\w{3,16}:\/+(?:\S+@)?([a-zA-Z0-9][a-zA-Z0-9\-.]+\.(?:aero|arpa|asia|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|xxx|[a-zA-Z]{2})))(?!\w)/gi;
+const numeric_ip =
+ /\w{3,16}:\/+(\S+@)?(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)\.(\d+|0[xX][0-9A-Fa-f]+)/gi
+let schemeless =
+ /(?:%(?:25)?(?:2F|3D|40))?((?:www\.)?[a-zA-Z0-9][a-zA-Z0-9\-.]{0,250}\.(?:aero|arpa|asia|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|xxx|[a-zA-Z]{2}))(?!\w)/gi
+let schemed =
+ /(\w{3,16}:\/+(?:\S+@)?([a-zA-Z0-9][a-zA-Z0-9\-.]+\.(?:aero|arpa|asia|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|xxx|[a-zA-Z]{2})))(?!\w)/gi
-const excludes = {};
+const excludes = {}
exports.register = function () {
-
// Override regexps if top_level_tlds file is present
if (tlds.top_level_tlds && Object.keys(tlds.top_level_tlds).length) {
- this.logdebug('Building new regexps from TLD file');
- const re_schemeless = `(?:%(?:25)?(?:2F|3D|40))?((?:www\\.)?[a-zA-Z0-9][a-zA-Z0-9\\-.]{0,250}\\.(?:${Object.keys(tlds.top_level_tlds).join('|')}))(?!\\w)`;
- schemeless = new RegExp(re_schemeless, 'gi');
- const re_schemed = `(\\w{3,16}:\\/+(?:\\S+@)?([a-zA-Z0-9][a-zA-Z0-9\\-.]+\\.(?:${Object.keys(tlds.top_level_tlds).join('|')})))(?!\\w)`;
- schemed = new RegExp(re_schemed, 'gi');
+ this.logdebug('Building new regexps from TLD file')
+ const re_schemeless = `(?:%(?:25)?(?:2F|3D|40))?((?:www\\.)?[a-zA-Z0-9][a-zA-Z0-9\\-.]{0,250}\\.(?:${Object.keys(tlds.top_level_tlds).join('|')}))(?!\\w)`
+ schemeless = new RegExp(re_schemeless, 'gi')
+ const re_schemed = `(\\w{3,16}:\\/+(?:\\S+@)?([a-zA-Z0-9][a-zA-Z0-9\\-.]+\\.(?:${Object.keys(tlds.top_level_tlds).join('|')})))(?!\\w)`
+ schemed = new RegExp(re_schemed, 'gi')
}
this.load_uribl_ini()
this.load_uribl_exludes()
if (this.zones.length === 0) {
- this.logerror('aborting: no zones configured');
- }
- else {
- this.register_hook('lookup_rdns', 'lookup_remote_ip');
- this.register_hook('helo' , 'lookup_ehlo')
- this.register_hook('ehlo' , 'lookup_ehlo')
- this.register_hook('mail' , 'lookup_mailfrom')
- this.register_hook('data' , 'enable_body_parsing')
- this.register_hook('data_post' , 'lookup_header_zones')
+ this.logerror('aborting: no zones configured')
+ } else {
+ this.register_hook('lookup_rdns', 'lookup_remote_ip')
+ this.register_hook('helo', 'lookup_ehlo')
+ this.register_hook('ehlo', 'lookup_ehlo')
+ this.register_hook('mail', 'lookup_mailfrom')
+ this.register_hook('data', 'enable_body_parsing')
+ this.register_hook('data_post', 'lookup_header_zones')
}
}
@@ -48,210 +49,241 @@ exports.load_uribl_ini = function () {
plugin.load_uribl_ini()
})
- this.zones = Object.keys(this.cfg).filter(a => a !== 'main')
+ this.zones = Object.keys(this.cfg).filter((a) => a !== 'main')
// defaults
if (!this.cfg.main.max_uris_per_list) {
- this.cfg.main.max_uris_per_list = 20;
+ this.cfg.main.max_uris_per_list = 20
}
}
exports.load_uribl_exludes = function () {
- this.config.get('uribl.excludes', 'list').forEach(domain => {
- excludes[domain.toLowerCase()] = 1;
- });
+ this.config.get('uribl.excludes', 'list').forEach((domain) => {
+ excludes[domain.toLowerCase()] = 1
+ })
}
-function check_excludes_list (host) {
- host = host.split('.').reverse();
- for (let i=0; i < host.length; i++) {
- let check;
+function check_excludes_list(host) {
+ host = host.split('.').reverse()
+ for (let i = 0; i < host.length; i++) {
+ let check
if (i === 0) {
- check = host[i];
+ check = host[i]
+ } else {
+ check = [host[i], check].join('.')
}
- else {
- check = [ host[i], check ].join('.');
- }
- if (excludes[check]) return true;
+ if (excludes[check]) return true
}
- return false;
+ return false
}
// IS: IPv6 compatible (maybe; if the BL supports IPv6 requests)
exports.do_lookups = function (connection, next, hosts, type) {
- const plugin = this;
+ const plugin = this
// Store the results in the correct place based on the lookup type
- const results = connection?.transaction?.results || connection?.results;
- if (!results) return next();
+ const results = connection?.transaction?.results || connection?.results
+ if (!results) return next()
- if (typeof hosts === 'string') hosts = [ hosts ];
+ if (typeof hosts === 'string') hosts = [hosts]
if (!hosts || !hosts.length) {
- connection.logdebug(plugin, `(${type}) no items found for lookup`);
- results.add(plugin, {skip: type});
- return next();
+ connection.logdebug(plugin, `(${type}) no items found for lookup`)
+ results.add(plugin, { skip: type })
+ return next()
}
- connection.logdebug(plugin, `(${type}) found ${hosts.length} items for lookup` );
- utils.shuffle(hosts);
+ connection.logdebug(
+ plugin,
+ `(${type}) found ${hosts.length} items for lookup`,
+ )
+ utils.shuffle(hosts)
- let j;
- const queries = {};
+ let j
+ const queries = {}
for (let host of hosts) {
- host = host.toLowerCase();
- connection.logdebug(plugin, `(${type}) checking: ${host}`);
+ host = host.toLowerCase()
+ connection.logdebug(plugin, `(${type}) checking: ${host}`)
// Make sure we have a valid TLD
- if (!net.isIPv4(host) && !net.isIPv6(host) && !tlds.top_level_tlds[(host.split('.').reverse())[0]]) {
- continue;
+ if (
+ !net.isIPv4(host) &&
+ !net.isIPv6(host) &&
+ !tlds.top_level_tlds[host.split('.').reverse()[0]]
+ ) {
+ continue
}
// Check the exclusion list
if (check_excludes_list(host)) {
- results.add(plugin, {skip: `excluded domain:${host}`});
- continue;
+ results.add(plugin, { skip: `excluded domain:${host}` })
+ continue
}
// Loop through the zones
- for (j=0; j < plugin.zones.length; j++) {
- const zone = plugin.zones[j];
- if (zone === 'main') continue; // skip config
- if (!plugin.cfg[zone] || (plugin.cfg[zone] && !/^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone][type]))) {
- results.add(plugin, {skip: `${type} unsupported for ${zone}` });
- continue;
+ for (j = 0; j < plugin.zones.length; j++) {
+ const zone = plugin.zones[j]
+ if (zone === 'main') continue // skip config
+ if (
+ !plugin.cfg[zone] ||
+ (plugin.cfg[zone] &&
+ !/^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone][type]))
+ ) {
+ results.add(plugin, { skip: `${type} unsupported for ${zone}` })
+ continue
}
// Convert in-addr.arpa into bare IPv4/v6 lookup
- const arpa = host.split(/\./).reverse();
- if (arpa.shift() === 'arpa'){
- const ip_format = arpa.shift();
- if ( ip_format === 'in-addr') {
- if (arpa.length < 4) continue; // Only full IP addresses
- host = arpa.join('.');
- }
- else if ( ip_format === 'ip6') {
- if (arpa.length < 32) continue; // Only full IP addresses
- host = arpa.join('.');
+ const arpa = host.split(/\./).reverse()
+ if (arpa.shift() === 'arpa') {
+ const ip_format = arpa.shift()
+ if (ip_format === 'in-addr') {
+ if (arpa.length < 4) continue // Only full IP addresses
+ host = arpa.join('.')
+ } else if (ip_format === 'ip6') {
+ if (arpa.length < 32) continue // Only full IP addresses
+ host = arpa.join('.')
}
}
- let lookup;
+ let lookup
// Handle zones that do not allow IP queries (e.g. Spamhaus DBL)
if (net.isIPv4(host)) {
- if (/^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].no_ip_lookups)) {
- results.add(plugin, {skip: `IP (${host}) not supported for ${zone}` });
- continue;
+ if (
+ /^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].no_ip_lookups)
+ ) {
+ results.add(plugin, {
+ skip: `IP (${host}) not supported for ${zone}`,
+ })
+ continue
}
// Skip any private IPs
if (net_utils.is_private_ip(host)) {
- results.add(plugin, {skip: 'private IP' });
- continue;
+ results.add(plugin, { skip: 'private IP' })
+ continue
}
// Reverse IP for lookup
- lookup = host.split(/\./).reverse().join('.');
- }
- else if (net.isIPv6(host)) {
- if (/^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].not_ipv6_compatible) || /^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].no_ip_lookups)) {
- results.add(plugin, {skip: `IP (${host}) not supported for ${zone}` });
- continue;
+ lookup = host.split(/\./).reverse().join('.')
+ } else if (net.isIPv6(host)) {
+ if (
+ /^(?:1|true|yes|enabled|on)$/i.test(
+ plugin.cfg[zone].not_ipv6_compatible,
+ ) ||
+ /^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].no_ip_lookups)
+ ) {
+ results.add(plugin, {
+ skip: `IP (${host}) not supported for ${zone}`,
+ })
+ continue
}
// Skip any private IPs
if (net_utils.is_private_ip(host)) {
- results.add(plugin, {skip: 'private IP' });
- continue;
+ results.add(plugin, { skip: 'private IP' })
+ continue
}
// Reverse IP for lookup
- lookup = net_utils.ipv6_reverse(host);
+ lookup = net_utils.ipv6_reverse(host)
}
// Handle zones that require host to be stripped to a domain boundary
- else if (/^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].strip_to_domain)) {
- lookup = (tlds.split_hostname(host, 3))[1];
+ else if (
+ /^(?:1|true|yes|enabled|on)$/i.test(plugin.cfg[zone].strip_to_domain)
+ ) {
+ lookup = tlds.split_hostname(host, 3)[1]
}
// Anything else..
else {
- lookup = host;
+ lookup = host
}
- if (!lookup) continue;
+ if (!lookup) continue
if (plugin.cfg[zone].dqs_key) {
- lookup = `${lookup}.${plugin.cfg[zone].dqs_key}`;
+ lookup = `${lookup}.${plugin.cfg[zone].dqs_key}`
}
- if (!queries[zone]) queries[zone] = {};
- if (Object.keys(queries[zone]).length > plugin.cfg.main.max_uris_per_list) {
- connection.logwarn(plugin, `discarding lookup ${lookup} for zone ${zone} maximum query limit reached`);
- results.add(plugin, {skip: `max query limit for ${zone}` });
- continue;
+ if (!queries[zone]) queries[zone] = {}
+ if (
+ Object.keys(queries[zone]).length > plugin.cfg.main.max_uris_per_list
+ ) {
+ connection.logwarn(
+ plugin,
+ `discarding lookup ${lookup} for zone ${zone} maximum query limit reached`,
+ )
+ results.add(plugin, { skip: `max query limit for ${zone}` })
+ continue
}
- queries[zone][lookup] = 1;
+ queries[zone][lookup] = 1
}
}
// Flatten object into array for easier querying
- const queries_to_run = [];
- for (j=0; j < Object.keys(queries).length; j++) {
+ const queries_to_run = []
+ for (j = 0; j < Object.keys(queries).length; j++) {
for (const query of Object.keys(queries[Object.keys(queries)[j]])) {
// host/domain, zone
- queries_to_run.push( [ query, Object.keys(queries)[j] ] );
+ queries_to_run.push([query, Object.keys(queries)[j]])
}
}
if (!queries_to_run.length) {
- results.add(plugin, {skip: `${type} (no queries)` });
- return next();
+ results.add(plugin, { skip: `${type} (no queries)` })
+ return next()
}
- utils.shuffle(queries_to_run); // Randomize the order
+ utils.shuffle(queries_to_run) // Randomize the order
// Perform the lookups
- let pending_queries = 0;
+ let pending_queries = 0
- let called_next = false;
- function nextOnce (code, msg) {
- if (called_next) return;
- called_next = true;
- next(code, msg);
+ let called_next = false
+ function nextOnce(code, msg) {
+ if (called_next) return
+ called_next = true
+ next(code, msg)
}
- function conclude_if_no_pending () {
- if (pending_queries !== 0) return;
- results.add(plugin, {pass: type});
- nextOnce();
+ function conclude_if_no_pending() {
+ if (pending_queries !== 0) return
+ results.add(plugin, { pass: type })
+ nextOnce()
}
- queries_to_run.forEach(query => {
- let lookup = query.join('.');
+ queries_to_run.forEach((query) => {
+ let lookup = query.join('.')
// Add root dot if necessary
- if (lookup[lookup.length-1] !== '.') {
- lookup = `${lookup}.`;
+ if (lookup[lookup.length - 1] !== '.') {
+ lookup = `${lookup}.`
}
- pending_queries++;
+ pending_queries++
dns.resolve4(lookup, (err, addrs) => {
+ pending_queries--
+ connection.logdebug(
+ plugin,
+ `${lookup} => (${err ? err : addrs.join(', ')})`,
+ )
- pending_queries--;
- connection.logdebug(plugin, `${lookup} => (${(err) ? err : addrs.join(', ')})`);
-
- if (err) return conclude_if_no_pending();
+ if (err) return conclude_if_no_pending()
- let skip = false;
- function do_reject (msg) {
- if (skip) return;
- if (called_next) return;
- if (!msg) msg = `${query[0]} blacklisted in ${query[1]}`;
+ let skip = false
+ function do_reject(msg) {
+ if (skip) return
+ if (called_next) return
+ if (!msg) msg = `${query[0]} blacklisted in ${query[1]}`
// Check for custom message
if (plugin.cfg[query[1]] && plugin.cfg[query[1]].custom_msg) {
msg = plugin.cfg[query[1]].custom_msg
- .replace(/\{uri\}/g, query[0])
- .replace(/\{zone\}/g, query[1]);
+ .replace(/\{uri\}/g, query[0])
+ .replace(/\{zone\}/g, query[1])
}
- results.add(plugin, {fail: [type, query[0], query[1]].join('/') });
- nextOnce(DENY, msg);
+ results.add(plugin, { fail: [type, query[0], query[1]].join('/') })
+ nextOnce(DENY, msg)
}
// Optionally validate first result against a regexp
if (plugin.cfg[query[1]] && plugin.cfg[query[1]].validate) {
- const re = new RegExp(plugin.cfg[query[1]].validate);
+ const re = new RegExp(plugin.cfg[query[1]].validate)
if (!re.test(addrs[0])) {
- connection.logwarn(plugin, `ignoring result (${addrs[0]}) for: ${lookup} as it did not match validation rule`);
- skip = true;
+ connection.logwarn(
+ plugin,
+ `ignoring result (${addrs[0]}) for: ${lookup} as it did not match validation rule`,
+ )
+ skip = true
}
}
@@ -259,52 +291,61 @@ exports.do_lookups = function (connection, next, hosts, type) {
if (plugin.cfg[query[1]] && plugin.cfg[query[1]].bitmask) {
// A bitmask zone should only return a single result
// We only support a bitmask of up to 128 in a single octet
- const last_octet = Number((addrs[0].split('.'))[3]);
- const bitmask = Number(plugin.cfg[query[1]].bitmask);
+ const last_octet = Number(addrs[0].split('.')[3])
+ const bitmask = Number(plugin.cfg[query[1]].bitmask)
if ((last_octet & bitmask) > 0) {
- connection.loginfo(plugin, `found ${query[0]} in zone ${query[1]} (${addrs.join(',')}; bitmask=${bitmask})`);
- do_reject();
- }
- else {
- connection.logdebug(plugin, `ignoring result (${addrs[0]}) for: ${lookup} as the bitmask did not match`);
- skip = true;
+ connection.loginfo(
+ plugin,
+ `found ${query[0]} in zone ${query[1]} (${addrs.join(',')}; bitmask=${bitmask})`,
+ )
+ do_reject()
+ } else {
+ connection.logdebug(
+ plugin,
+ `ignoring result (${addrs[0]}) for: ${lookup} as the bitmask did not match`,
+ )
+ skip = true
}
- }
- else {
- connection.loginfo(plugin, `found ${query[0]} in zone ${query[1]} (${addrs.join(',')})`);
- do_reject();
+ } else {
+ connection.loginfo(
+ plugin,
+ `found ${query[0]} in zone ${query[1]} (${addrs.join(',')})`,
+ )
+ do_reject()
}
- conclude_if_no_pending();
- });
- });
+ conclude_if_no_pending()
+ })
+ })
- conclude_if_no_pending();
+ conclude_if_no_pending()
}
-function getTimedNext (plugin, connection, next, type) {
-
+function getTimedNext(plugin, connection, next, type) {
let timer
let calledNext = false
- function timedNextOnce (code, msg) {
- clearTimeout(timer);
- if (calledNext) return;
- calledNext = true;
- next(code, msg);
+ function timedNextOnce(code, msg) {
+ clearTimeout(timer)
+ if (calledNext) return
+ calledNext = true
+ next(code, msg)
}
- timer = setTimeout(() => {
- connection.logdebug(plugin, 'timeout');
- connection.results.add(plugin, {err: `${type} timeout` });
- timedNextOnce();
- }, ((plugin.cfg.main?.timeout || 30) - 2) * 1000);
+ timer = setTimeout(
+ () => {
+ connection.logdebug(plugin, 'timeout')
+ connection.results.add(plugin, { err: `${type} timeout` })
+ timedNextOnce()
+ },
+ ((plugin.cfg.main?.timeout || 30) - 2) * 1000,
+ )
return timedNextOnce
}
exports.lookup_remote_ip = function (next, connection) {
- const plugin = this;
+ const plugin = this
const timedNext = getTimedNext(plugin, connection, next, 'rdns')
@@ -313,14 +354,14 @@ exports.lookup_remote_ip = function (next, connection) {
switch (err.code) {
case dns.NXDOMAIN:
case dns.NOTFOUND:
- break;
+ break
default:
- connection.results.add(plugin, {err });
+ connection.results.add(plugin, { err })
}
- return timedNext();
+ return timedNext()
}
// console.log(`lookup_remote_ip, ${connection.remote.ip} resolves to ${rdns}`)
- plugin.do_lookups(connection, timedNext, rdns, 'rdns');
+ plugin.do_lookups(connection, timedNext, rdns, 'rdns')
})
}
@@ -328,127 +369,126 @@ exports.lookup_ehlo = function (next, connection, helo) {
const timedNext = getTimedNext(this, connection, next, 'helo')
// Handle IP literals
- let literal;
- if ((literal = net_utils.get_ipany_re('^\\[(?:IPv6:)?', '\\]$','').exec(helo))) {
- this.do_lookups(connection, timedNext, literal[1], 'helo');
- }
- else {
- this.do_lookups(connection, timedNext, helo, 'helo');
+ let literal
+ if (
+ (literal = net_utils.get_ipany_re('^\\[(?:IPv6:)?', '\\]$', '').exec(helo))
+ ) {
+ this.do_lookups(connection, timedNext, literal[1], 'helo')
+ } else {
+ this.do_lookups(connection, timedNext, helo, 'helo')
}
}
exports.lookup_mailfrom = function (next, connection, params) {
const timedNext = getTimedNext(this, connection, next, 'envfrom')
- this.do_lookups(connection, timedNext, params[0].host, 'envfrom');
+ this.do_lookups(connection, timedNext, params[0].host, 'envfrom')
}
exports.enable_body_parsing = (next, connection) => {
if (connection?.transaction) {
- connection.transaction.parse_body = true;
+ connection.transaction.parse_body = true
}
- next();
+ next()
}
exports.lookup_header_zones = function (next, connection) {
-
- const email_re = /[^@]+@([^> ]+)>?/;
- const plugin = this;
- const trans = connection.transaction;
+ const email_re = /[^@]+@([^> ]+)>?/
+ const plugin = this
+ const trans = connection.transaction
const timedNext = getTimedNext(this, connection, next, 'ms, typeg')
// From header
- function do_from_header (cb) {
- const from = trans.header.get_decoded('from');
- const fmatch = email_re.exec(from);
+ function do_from_header(cb) {
+ const from = trans.header.get_decoded('from')
+ const fmatch = email_re.exec(from)
if (fmatch) {
- return plugin.do_lookups(connection, cb, fmatch[1], 'from');
+ return plugin.do_lookups(connection, cb, fmatch[1], 'from')
}
- cb();
+ cb()
}
// Reply-To header
- function do_replyto_header (cb) {
- const replyto = trans.header.get('reply-to');
- const rmatch = email_re.exec(replyto);
+ function do_replyto_header(cb) {
+ const replyto = trans.header.get('reply-to')
+ const rmatch = email_re.exec(replyto)
if (rmatch) {
- return plugin.do_lookups(connection, cb, rmatch[1], 'replyto');
+ return plugin.do_lookups(connection, cb, rmatch[1], 'replyto')
}
- cb();
+ cb()
}
// Message-Id header
- function do_msgid_header (cb) {
- const msgid = trans.header.get('message-id');
- const mmatch = /@([^>]+)>/.exec(msgid);
+ function do_msgid_header(cb) {
+ const msgid = trans.header.get('message-id')
+ const mmatch = /@([^>]+)>/.exec(msgid)
if (mmatch) {
- return plugin.do_lookups(connection, cb, mmatch[1], 'msgid');
+ return plugin.do_lookups(connection, cb, mmatch[1], 'msgid')
}
- cb();
+ cb()
}
// Body
- function do_body (cb) {
- const urls = {};
- extract_urls(urls, trans.body, connection, plugin);
- plugin.do_lookups(connection, cb, Object.keys(urls), 'body');
+ function do_body(cb) {
+ const urls = {}
+ extract_urls(urls, trans.body, connection, plugin)
+ plugin.do_lookups(connection, cb, Object.keys(urls), 'body')
}
- const chain = [ do_from_header, do_replyto_header, do_msgid_header, do_body ];
- function chain_caller (code, msg) {
- if (code) return timedNext(code, msg);
+ const chain = [do_from_header, do_replyto_header, do_msgid_header, do_body]
+ function chain_caller(code, msg) {
+ if (code) return timedNext(code, msg)
- if (!chain.length) return timedNext();
+ if (!chain.length) return timedNext()
- const next_in_chain = chain.shift();
- next_in_chain(chain_caller);
+ const next_in_chain = chain.shift()
+ next_in_chain(chain_caller)
}
- chain_caller();
+ chain_caller()
}
-function extract_urls (urls, body, connection, self) {
+function extract_urls(urls, body, connection, self) {
// extract from body.bodytext
- let match;
- if (!body || !body.bodytext) { return; }
+ let match
+ if (!body || !body.bodytext) {
+ return
+ }
- let uri;
+ let uri
// extract numeric URIs
while ((match = numeric_ip.exec(body.bodytext))) {
try {
- uri = url.parse(match[0]);
+ uri = url.parse(match[0])
// Don't reverse the IPs here; we do it in the lookup
- urls[uri.hostname] = uri;
- }
- catch (error) {
- connection.logerror(self, `parse error: ${match[0]} ${error.message}`);
+ urls[uri.hostname] = uri
+ } catch (error) {
+ connection.logerror(self, `parse error: ${match[0]} ${error.message}`)
}
}
// match plain hostname.tld
while ((match = schemeless.exec(body.bodytext))) {
try {
- uri = url.parse(`http://${match[1]}`);
- urls[uri.hostname] = uri;
- }
- catch (error) {
- connection.logerror(self, `parse error: ${match[1]} ${error.message}`);
+ uri = url.parse(`http://${match[1]}`)
+ urls[uri.hostname] = uri
+ } catch (error) {
+ connection.logerror(self, `parse error: ${match[1]} ${error.message}`)
}
}
// match scheme:// URI
while ((match = schemed.exec(body.bodytext))) {
try {
- uri = url.parse(match[1]);
- urls[uri.hostname] = uri;
- }
- catch (error) {
- connection.logerror(self, `parse error: ${match[1]} ${error.message}`);
+ uri = url.parse(match[1])
+ urls[uri.hostname] = uri
+ } catch (error) {
+ connection.logerror(self, `parse error: ${match[1]} ${error.message}`)
}
}
// TODO: URIHASH
// TODO: MAILHASH
- for (let i=0,l=body.children.length; i < l; i++) {
- extract_urls(urls, body.children[i], connection, self);
+ for (let i = 0, l = body.children.length; i < l; i++) {
+ extract_urls(urls, body.children[i], connection, self)
}
}
diff --git a/package.json b/package.json
index ed8d77b..3119d03 100644
--- a/package.json
+++ b/package.json
@@ -1,13 +1,21 @@
{
"name": "haraka-plugin-uribl",
- "version": "1.0.8",
+ "version": "1.0.9",
"description": "Haraka plugin that checks domains in emails against URI blacklists",
+ "files": [
+ "CHANGELOG.md",
+ "config"
+ ],
"main": "index.js",
"scripts": {
+ "format": "npm run prettier:fix && npm run lint:fix",
"lint": "npx eslint *.js test/*.js",
- "lintfix": "npx eslint --fix *.js test/*.js",
+ "lint:fix": "npx eslint --fix *.js test/*.js",
+ "prettier": "npx prettier . --check",
+ "prettier:fix": "npx prettier . --write --log-level=warn",
+ "test": "npx mocha",
"versions": "npx dependency-version-checker check",
- "test": "npx mocha"
+ "versions:fix": "npx dependency-version-checker update"
},
"repository": {
"type": "git",
@@ -25,14 +33,14 @@
},
"homepage": "https://github.com/haraka/haraka-plugin-uribl#readme",
"devDependencies": {
- "eslint": "8",
- "eslint-plugin-haraka": "*",
- "haraka-test-fixtures": "*",
- "mocha": "9"
+ "eslint": "^9.17.0",
+ "@haraka/eslint-config": "^2.0.2",
+ "haraka-test-fixtures": "^1.3.8",
+ "mocha": "^11.1.0"
},
"dependencies": {
- "haraka-net-utils": "^1.4.1",
- "haraka-tld": "^1.1.0",
- "haraka-utils": "^1.0.3"
+ "haraka-net-utils": "^1.7.1",
+ "haraka-tld": "^1.2.2",
+ "haraka-utils": "^1.1.3"
}
}
diff --git a/test/index.js b/test/index.js
index c7d532a..d387b92 100644
--- a/test/index.js
+++ b/test/index.js
@@ -1,8 +1,8 @@
-'use strict';
+'use strict'
// node.js built-in modules
-const assert = require('assert')
-const path = require('path');
+const assert = require('assert')
+const path = require('path')
// npm modules
const fixtures = require('haraka-test-fixtures')
@@ -13,9 +13,9 @@ const fixtures = require('haraka-test-fixtures')
beforeEach(function () {
this.plugin = new fixtures.plugin('uribl')
- this.plugin.config.root_path = path.resolve(__dirname, '../../config');
+ this.plugin.config.root_path = path.resolve(__dirname, '../../config')
- this.plugin.register();
+ this.plugin.register()
})
describe('uribl', function () {
@@ -32,35 +32,43 @@ describe('load_uribl_ini', function () {
})
describe('do_lookups', function () {
-
beforeEach(function () {
- this.connection = fixtures.connection.createConnection();
+ this.connection = fixtures.connection.createConnection()
})
it('lookup_test_ip: 127.0.0.2', function (done) {
- // this.connection.transaction = fixtures.transaction.createTransaction()
- this.plugin.do_lookups(this.connection, (code, msg) => {
- // no result b/c private IP
- assert.equal(code, undefined)
- assert.equal(msg, undefined)
- done()
- }, ['127.0.0.2'], 'body')
+ this.plugin.do_lookups(
+ this.connection,
+ (code, msg) => {
+ // no result b/c private IP
+ assert.equal(code, undefined)
+ assert.equal(msg, undefined)
+ done()
+ },
+ ['127.0.0.2'],
+ 'body',
+ )
})
it('lookup_test_ip: test.uribl.com', function (done) {
this.timeout(4000)
- this.plugin.do_lookups(this.connection, (code, msg) => {
- if (code) console.log(`code: ${code}, ${msg}`)
- assert.equal(code, undefined)
- assert.equal(msg, undefined)
- done()
- }, ['test.uribl.com'], 'body')
+ this.plugin.do_lookups(
+ this.connection,
+ (code, msg) => {
+ if (code) console.log(`code: ${code}, ${msg}`)
+ assert.equal(code, undefined)
+ assert.equal(msg, undefined)
+ done()
+ },
+ ['test.uribl.com'],
+ 'body',
+ )
})
})
describe('lookup_remote_ip', function () {
beforeEach(function () {
- this.connection = fixtures.connection.createConnection();
+ this.connection = fixtures.connection.createConnection()
})
it('lookup_remote_ip: 66.128.51.165', function (done) {