Skip to content

Commit

Permalink
Merge pull request #12 from alertlogic/integration
Browse files Browse the repository at this point in the history
Release 1.0.1
  • Loading branch information
kkuzmin authored Sep 25, 2017
2 parents d7c5114 + c319131 commit 344d2d5
Show file tree
Hide file tree
Showing 19 changed files with 434 additions and 185 deletions.
4 changes: 2 additions & 2 deletions Master/endpoints.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ const m_appSettings = require('./appsettings');

exports.checkUpdate = function (context, AlertlogicMasterTimer, callback) {
if (process.env.APP_INGEST_ENDPOINT && process.env.APP_AZCOLLECT_ENDPOINT) {
context.log('DEBUG: Reuse Ingest endpoint', process.env.APP_INGEST_ENDPOINT);
context.log('DEBUG: Reuse Azcollect endpoint', process.env.APP_AZCOLLECT_ENDPOINT);
context.log.verbose('Reuse Ingest endpoint', process.env.APP_INGEST_ENDPOINT);
context.log.verbose('Reuse Azcollect endpoint', process.env.APP_AZCOLLECT_ENDPOINT);
return callback(null);
} else {
// Endpoint settings do not exist. Update them.
Expand Down
10 changes: 5 additions & 5 deletions Master/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ module.exports = function (context, AlertlogicMasterTimer) {
if (endpointsError) {
return asyncCallback(endpointsError);
}
context.log('INFO: Alertlogic endpoints updated.');
context.log.info('Alertlogic endpoints updated.');
return asyncCallback(null);
});
},
Expand All @@ -41,7 +41,7 @@ module.exports = function (context, AlertlogicMasterTimer) {
if (azcollectError) {
return asyncCallback(azcollectError);
}
context.log('INFO: O365 source registered', collectorId);
context.log.info('O365 source registered', collectorId);
return asyncCallback(null, azcollectSvc);
});
},
Expand All @@ -52,15 +52,15 @@ module.exports = function (context, AlertlogicMasterTimer) {
if (azcollectError) {
return asyncCallback(`Checkin failed ${azcollectError}`);
}
context.log('INFO: O365 source checkin OK', checkinResp);
context.log.info('O365 source checkin OK', checkinResp);
return asyncCallback(null);
});
}
],
function(error, results) {
if (error) {
context.log('ERROR: Master error ', error);
context.log.error('Master error ', error);
}
context.done();
context.done(error);
});
};
22 changes: 13 additions & 9 deletions Master/o365collector.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,17 @@ const m_o365mgmnt = require('../lib/o365_mgmnt');


exports.checkRegister = function (context, AlertlogicMasterTimer, azcollectSvc, callback) {
if (process.env.O365_COLLECTOR_ID) {
context.log('DEBUG: Reuse collector id', process.env.O365_COLLECTOR_ID);
if (process.env.O365_COLLECTOR_ID && process.env.O365_HOST_ID) {
context.log.verbose('Reuse collector id', process.env.O365_COLLECTOR_ID);
return callback(null, process.env.O365_COLLECTOR_ID);
} else {
// Collector is not registered.
azcollectSvc.register_o365().then(resp => {
m_appSettings.updateAppsettings({O365_COLLECTOR_ID: resp.source.id},
let newSettings = {
O365_COLLECTOR_ID: resp.source.id,
O365_HOST_ID: resp.source.host.id
};
m_appSettings.updateAppsettings(newSettings,
function(settingsError) {
if (settingsError) {
return callback(settingsError);
Expand Down Expand Up @@ -80,20 +84,20 @@ exports.checkin = function (context, AlertlogicMasterTimer, azcollectSvc, callba
var _checkEnableAuditStreams = function(context, listedStreams, callback) {
try {
let o365AuditStreams = JSON.parse(process.env.O365_CONTENT_STREAMS);
// TODO: take webhook path from O365Webhook/function.json
let webhookURL = 'https://' + process.env.WEBSITE_HOSTNAME +
'/api/o365/webhook';
async.map(o365AuditStreams,
function(stream, asyncCallback) {
let currentStream = listedStreams.find(
obj => obj.contentType === stream);
if (currentStream && currentStream.status === 'enabled' &&
currentStream.webhook &&
currentStream.webhook.status === 'enabled') {
context.log('DEBUG: Stream already enabled', stream);
currentStream.webhook.status === 'enabled' &&
currentStream.webhook.address === webhookURL) {
context.log.verbose('Stream already enabled', stream);
return asyncCallback(null, stream);
} else {
// TODO: take webhook path from O365Webhook/function.json
let webhookURL = 'https://' +
process.env.WEBSITE_HOSTNAME +
'/api/o365/webhook';
let webhook = { webhook : {
address : webhookURL,
expiration : ""
Expand Down
6 changes: 3 additions & 3 deletions O365WebHook/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,12 @@ module.exports = function (context, event) {
return m_o365content.processNotifications(context, eventBody,
function(err) {
if (err) {
context.log(`ERROR: ${err}`);
context.log.error(`${err}`);
context.res.headers = {};
context.res.status = 500;
context.done();
context.done(err);
} else {
context.log('Debug: Success!');
context.log.info('OK!');
context.done();
}
});
Expand Down
2 changes: 1 addition & 1 deletion O365WebHook/ingest.js
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class Ingest extends m_alServiceC.AlServiceC {
},
body : data
};
return this.post(`/data/o365msgs`, payload);
return this.post(`/data/aicspmsgs`, payload);
}
}

Expand Down
74 changes: 35 additions & 39 deletions O365WebHook/ingest_proto.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,15 @@ const protobuf = require('protobufjs');
const async = require('async');
const Long = require('long');
const path = require('path');
const crypto = require('crypto');

// FIXME - protobuf load
// We have to load PROTO_DEF every invocation. Maybe the solution can to to use
// another library such as bpf which compiles proto to js.
module.exports.load = function(context, callback) {
protobuf.load(getCommonProtoPath(), function(err, root) {
if (err)
context.log('Error: Unable to load proto files.', err);
context.log.error('Unable to load proto files.', err);

callback(err, root);
});
Expand All @@ -33,7 +34,7 @@ module.exports.setMessage = function(context, root, content, callback) {
},
function(err, result) {
if (err)
context.log('Error: Unable to build messages.');
context.log.error('Unable to build messages.');

callback(err, result);
}
Expand All @@ -43,20 +44,28 @@ module.exports.setMessage = function(context, root, content, callback) {

module.exports.setHostMetadata = function(context, root, content, callback) {
var hostmetaType = root.lookupType('host_metadata.metadata');

var hostmetaData = getHostmeta(context, root);
var meta = {
hostUuid : process.env.O365_HOST_ID,
data : hostmetaData,
dataChecksum : new Buffer('')
};
var sha = crypto.createHash('sha1');
var hashPayload = hostmetaType.encode(meta).finish();
hashValue = sha.update(hashPayload).digest();

var metadataPayload = {
// FIXME - we need to calculate checksum properly
dataChecksum: new Buffer.from([234,104,231,10,12,60,139,208,204,230,
236,248,60,113,61,93,52,49,18,194]),
timestamp: Math.floor(Date.now() / 1000),
data: dummyMetadataDict(context, root)
hostUuid : process.env.O365_HOST_ID,
dataChecksum : hashValue,
timestamp : Math.floor(Date.now() / 1000),
data : hostmetaData
};

build(hostmetaType, metadataPayload, function(err, buf) {
if (err)
context.log('Error: Unable to build host_metadata.');
context.log.error('Unable to build host_metadata.');

callback(err, buf);
return callback(err, buf);
});
};

Expand All @@ -72,9 +81,9 @@ module.exports.setBatch = function(context, root, metadata, messages, callback)

build(batchType, batchPayload, function(err, buf) {
if (err)
context.log('Error: Unable to build collected_batch.');
context.log.error('Unable to build collected_batch.');

callback(err, buf);
return callback(err, buf);
});
};

Expand All @@ -88,16 +97,16 @@ module.exports.setBatchList = function(context, root, batches, callback) {

build(batchListType, batchListPayload, function(err, buf) {
if (err)
context.log('Error: Unable to build collected_batch_list.');
context.log.error('Unable to build collected_batch_list.');

callback(err, buf);
return callback(err, buf);
});
};

module.exports.encode = function(context, root, batchList, callback) {
var batchListType = root.lookupType('common_proto.collected_batch_list');
var buf = batchListType.encode(batchList).finish();
callback(null, buf);
return callback(null, buf);
};


Expand All @@ -110,7 +119,7 @@ function build(type, payload, callback) {

var payloadCreated = type.create(payload);

callback(null, payloadCreated);
return callback(null, payloadCreated);
}


Expand Down Expand Up @@ -139,44 +148,31 @@ function parseMessage(context, root, memo, content, callback) {

build(messageType, messagePayload, function(err, buf) {
if (err)
context.log('Error: Unable to build collected_message.');
context.log.error('Unable to build collected_message.');

memo.push(buf);
callback(err, memo);
return callback(err, memo);
});
}

// TODO - Fill Metadata dictionary with some dummy content.
// FIXME - we need to use some real data in metadata
function dummyMetadataDict(context, root) {
function getHostmeta(context, root) {
var dictType = root.lookupType('alc_dict.dict');
var elemType = root.lookupType('alc_dict.elem');
var valueType = root.lookupType('alc_dict.value');

var val1 = {str: 'standalone'};
var valPayload1 = buildSync(valueType, val1);

var val2 = {str: '454712-mnimn2.syd.intensive.int'};
var valPayload2 = buildSync(valueType, val2);

var elem1 = {
var hostTypeElem = {
key: 'host_type',
value: val1
value: {str: 'azure_fun'}
};
var elemPayload1 = buildSync(elemType, elem1);

var elem2 = {
var localHostnameElem = {
key: 'local_hostname',
value: val2
value: {str: process.env.WEBSITE_HOSTNAME}
};
var elemPayload2 = buildSync(elemType, elem2);

var dict = {
elem: [elem1, elem2]
elem: [localHostnameElem, hostTypeElem]
};
var dictPayload = buildSync(dictType, dict);

return dictPayload;

return buildSync(dictType, dict);
}


Expand Down
7 changes: 5 additions & 2 deletions O365WebHook/o365content.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ function parseContent(context, parsedContent, callback) {

var creationTime;
if (item.CreationTime == undefined) {
context.log('WARNING: Unable to parse CreationTime from content.');
context.log.warn('Unable to parse CreationTime from content.');
creationTime = Math.floor(Date.now() / 1000);
}
else {
Expand All @@ -105,7 +105,7 @@ function parseContent(context, parsedContent, callback) {
if (err) {
return callback(`Content parsing failure. ${err}`);
} else {
context.log('DEBUG: parsedData: ', result);
context.log.verbose('parsedData: ', result);
return callback(null, result);
}
}
Expand Down Expand Up @@ -152,6 +152,9 @@ function sendToIngest(context, content, callback) {
if (err) {
return callback(`Unable to compress. ${err}`);
} else {
if (compressed.byteLength > 700000)
context.log.warn(`Compressed log batch length`,
`(${compressed.byteLength}) exceeds maximum allowed value.`);
return g_ingestc.sendO365Data(compressed)
.then(resp => {
return callback(null, resp);
Expand Down
8 changes: 8 additions & 0 deletions PostDeploymentActions/updateMasterTimer.ps1
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Post-deployment step: re-anchor the Master function's timer trigger so its
# first run lands roughly one minute after deployment, then repeats every
# 15 minutes, instead of every instance firing on the same fixed schedule.
$date = Get-Date
# Offset within the 15-minute cycle, one minute ahead of "now" so the first
# execution happens shortly after this script runs.
$min = ($date.Minute + 1) % 15
$sec = $date.Second
# NCRONTAB format (sec min hour day month day-of-week):
# "$min-59/15" fires at $min, $min+15, $min+30, $min+45 past each hour.
$new_schedule = "$sec $min-59/15 * * * *"
Write-Output "Updating Master timer trigger with ($new_schedule)."
# Patch the schedule on the 'AlertlogicMasterTimer' binding of the deployed
# function.json in place.
# NOTE(review): the doubled backslashes are literal inside single quotes;
# Windows path resolution tolerates repeated separators — confirm intended.
$master_function = Get-Content '..\\wwwroot\\Master\\function.json' -raw | ConvertFrom-Json
$master_function.bindings | % {if($_.name -eq 'AlertlogicMasterTimer'){$_.schedule=$new_schedule}}
$master_function | ConvertTo-Json | set-content '..\\wwwroot\\Master\\function.json'
9 changes: 9 additions & 0 deletions PostDeploymentActions/updateUpdaterTimer.ps1
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Post-deployment step: give the Updater function a randomized twice-daily
# timer trigger so collector update checks are spread across the day and
# across deployments, rather than all instances firing at the same moment.
#
# Fixes:
#  * Get-Random's -Maximum bound is EXCLUSIVE, so the original '-maximum 11'
#    produced hours 0..10 (11 unreachable) and '-maximum 59' produced 0..58
#    (minute/second 59 unreachable). Bounds bumped to 12/60 for the full range.
#  * The closing period of the Write-Output message sat OUTSIDE the string
#    ("…".), where a trailing '.' is PowerShell's member-access operator and
#    breaks parsing; moved inside the string (matching updateMasterTimer.ps1).
$randH = Get-Random -minimum 0 -maximum 12
$randM = Get-Random -minimum 0 -maximum 60
$randS = Get-Random -minimum 0 -maximum 60
# Second daily run 12 hours after the first.
$randH12 = $randH + 12
# NCRONTAB format: sec min hour day month day-of-week.
$new_schedule = "$randS $randM $randH,$randH12 * * *"
Write-Output "Updating Updater timer trigger with ($new_schedule)."
# Patch the schedule on the 'AlertlogicUpdaterTimer' binding of the deployed
# function.json in place.
$updater_function = Get-Content '..\\wwwroot\\Updater\\function.json' -raw | ConvertFrom-Json
$updater_function.bindings | % {if($_.name -eq 'AlertlogicUpdaterTimer'){$_.schedule=$new_schedule}}
$updater_function | ConvertTo-Json | set-content '..\\wwwroot\\Updater\\function.json'
Loading

0 comments on commit 344d2d5

Please sign in to comment.