How to do it...

Our service is going to record events of interest in the system, such as page loads. In a full system we might record this type of information against specific user IDs in order to analyze system usage patterns. However, since our basic system doesn't include user context, we will simply be recording events as system events.

Let's start by creating a directory for our service and initializing it with a package.json, then install the redis, mongodb, and concordant modules, as follows:

$ cd micro
$ mkdir eventservice
$ cd eventservice
$ npm init -y
$ npm install --save redis mongodb concordant

Next we'll create the now familiar service structure within our micro/eventservice folder:

$ touch index.js wiring.js service.js 

Our usual service micro/eventservice/index.js code should look like so:

// Entry point: build the service object and let the wiring layer
// connect it to the Redis work queue.
const wire = require('./wiring')
const createService = require('./service')

wire(createService())

Next let's add our wiring. The wiring.js file should look like so:

 const { dns } = require('concordant')()
const redis = require('redis')
const QNAME = 'eventservice'

module.exports = wiring

function wiring (service) {

const endpoint = '_main._tcp.redis.micro.svc.cluster.local'

dns.resolve(endpoint, (err, locs) => {
if (err) {
console.log(err)
return
}
const { port, host } = locs[0]
pullFromQueue(redis.createClient(port, host))
})

function pullFromQueue (client) {
client.brpop(QNAME, 5, function (err, data) {
if (err) console.error(err)
if (err || !data) {
pullFromQueue(client)
return
}
const msg = JSON.parse(data[1])
const { action, returnPath } = msg
const cmd = service[action]
if (typeof cmd !== 'function') {
pullFromQueue(client)
return
}
cmd(msg, (err, result) => {
if (err) {
console.error(err)
pullFromQueue(client)
return
}
if (!returnPath) {
pullFromQueue(client)
return
}
client.lpush(returnPath, JSON.stringify(result), (err) => {
if (err) console.error(err)
pullFromQueue(client)
})
})
})
}
}

Finally, we'll populate our service.js with the following code:


const { MongoClient } = require('mongodb')
const { dns } = require('concordant')()

module.exports = service

/**
 * Event service: records system events into MongoDB and summarizes
 * page-load counts per URL. Returns { record, summary }, both taking
 * (args, cb) in Node callback style.
 */
function service () {
  // MongoDB handle; falsy until the connection is established.
  // NOTE(review): assumes the mongodb 2.x driver, where connect()
  // yields a Db usable with .collection() — confirm against package.json.
  var db

  setup()

  // Resolve the MongoDB endpoint and connect, retrying every 100ms
  // until the connection succeeds.
  function setup () {
    const mongo = '_main._tcp.mongo.micro.svc.cluster.local'

    dns.resolve(mongo, (err, locs) => {
      if (err) {
        console.error(err)
        return
      }
      const { host, port } = locs[0]
      const url = `mongodb://${host}:${port}/events`
      MongoClient.connect(url, (err, client) => {
        if (err) {
          // BUG FIX: this message was split across two lines in the
          // original listing, breaking the string literal.
          console.log('failed to connect to MongoDB retrying in 100ms')
          setTimeout(setup, 100)
          return
        }
        db = client
        // Drop the handle on close so callers get a clean
        // 'No database connection' error instead of a dead client.
        db.on('close', () => { db = null })
      })
    })
  }

  /**
   * Records a single event document { ts, eventType, url }.
   * @param {Object} args - expects { type, url }
   * @param {Function} cb - cb(err, insertResult)
   */
  function record (args, cb) {
    if (!db) {
      cb(Error('No database connection'))
      return
    }
    const events = db.collection('events')
    const data = {
      ts: Date.now(),
      eventType: args.type,
      url: args.url
    }
    events.insert(data, (err, result) => {
      if (err) {
        cb(err)
        return
      }
      cb(null, result)
    })
  }

  /**
   * Builds a { url: count } map over all recorded events.
   * @param {Object} args - unused, present for the common action signature
   * @param {Function} cb - cb(err, summaryObject)
   */
  function summary (args, cb) {
    if (!db) {
      cb(Error('No database connection'))
      return
    }
    const summary = {}
    const events = db.collection('events')
    events.find({}).toArray((err, docs) => {
      if (err) return cb(err)

      docs.forEach(function (doc) {
        if (!(summary[doc.url])) {
          summary[doc.url] = 1
        } else {
          summary[doc.url]++
        }
      })
      cb(null, summary)
    })
  }

  return {
    record: record,
    summary: summary
  }
}

That takes care of our events service, which is exposed over a Redis queue.

Now we have to hook this into our web application (webapp). We're going to do this by adding a small piece of middleware to our Express server.

Let's enter the root of our project:

$ cd .. #assuming we're currently in micro/eventservice 

We should now be in the micro folder.

Let's enter the webapp folder and create a lib directory, with a file called event-logger.js:

$ cd webapp
$ npm i --save redis
$ mkdir lib
$ touch lib/event-logger.js

The content of event-logger.js should look like so:

 const { dns } = require('concordant')()
const redis = require('redis')

module.exports = eventLogger

function eventLogger () {
const QNAME = 'eventservice'
var client

const endpoint = '_main._tcp.redis.micro.svc.cluster.local'
dns.resolve(endpoint, (err, locs) => {
if (err) {
console.error(err)
return
}
const { port, host } = locs[0]
client = redis.createClient(port, host)
})

function middleware (req, res, next) {
if (!client) {
console.log('client not ready, waiting 100ms')
setTimeout(middleware, 100, req, res, next)
return
}
const event = {
action: 'record',
type: 'page',
url: `${req.protocol}://${req.get('host')}${req.originalUrl}`
}
client.lpush(QNAME, JSON.stringify(event), (err) => {
if (err) console.error(err)
next()
})
}

return middleware
}

Next we need to hook this into our application as a piece of middleware. Let's open the file micro/webapp/app.js.

Near the top of micro/webapp/app.js, we'll add the following line after the preexisting require statements:

var eventLogger = require('./lib/event-logger')

At the top of the middleware section, let's insert:

app.use(eventLogger())

Now every request to our server will generate and send an event message to the Redis queue for each page load event in the system.

Finally, we need something to read our recorded events for us. We implemented a summary method in the eventservice, so we need some way to call this method. We would not normally expose this type of information to our webapp. So let's just write a small command-line application to expose this summary information for us in lieu of a full analytics system! To do this we'll create a new directory called report, initialize it with a package.json, install redis, cli-table, and concordant, and then create env.js and index.js files:

$ cd micro
$ mkdir report
$ cd report
$ npm init -y
$ npm install --save redis cli-table concordant
$ touch env.js index.js

The concordant module uses environment variables to determine appropriate DNS resolution behavior, so let's set these environment variables up in the env.js file like so:

// Environment settings consumed by the concordant module to decide how
// DNS resolution is performed. Outside production we point concordant
// at the local Fuge DNS server.
const settings = {
  DNS_NAMESPACE: 'micro',
  DNS_SUFFIX: 'svc.cluster.local'
}

const inProduction = process.env.NODE_ENV === 'production'
if (!inProduction) {
  settings.DNS_HOST = '127.0.0.1'
  settings.DNS_PORT = '53053'
}

Object.assign(process.env, settings)

Our micro/report/env.js file will be required before concordant is initialized. Our micro/report/index.js should look like so:

 require('./env')
const { dns } = require('concordant')()
const redis = require('redis')
const CliTable = require('cli-table')
const QNAME = 'eventservice'
const RESPONSE_QUEUE = 'summary'
const ENDPOINT = '_main._tcp.redis.micro.svc.cluster.local'

dns.resolve(ENDPOINT, report)

function report (err, locs) {
if (err) { return console.log(err) }
const { port, host } = locs[0]
const client = redis.createClient(port, host)
const event = JSON.stringify({
action: 'summary',
returnPath: RESPONSE_QUEUE
})

client.lpush(QNAME, event, (err) => {
if (err) {
console.error(err)
return
}

client.brpop(RESPONSE_QUEUE, 5, (err, data) => {
if (err) {
console.error(err)
return
}
const summary = JSON.parse(data[1])
const cols = Object.keys(summary).map((url) => [url,
summary[url]])
const table = new CliTable({
head: ['url', 'count'],
colWidths: [50, 10]
})
table.push(...cols)
console.log(table.toString())
client.quit()
})
})
}

Finally, we need to add the Redis container and our new eventservice to our Fuge configuration.

Let's edit fuge/fuge.yml, adding the following two entries:

# eventservice runs as a local process; redis runs as a Docker container
# with its default port exposed as the 'main' named port.
eventservice:
  type: process
  path: ../eventservice
  run: 'node index.js'

redis:
  image: redis
  type: container
  ports:
    - main=6379:6379

Now we start up our system in the Fuge shell:

$ fuge shell fuge/fuge.yml
fuge> start all

We should be able to see that along with the rest of our system the Redis container and eventservice have also started up. As before we can browse the application, add some numbers, and look at the audit log. However, this time every page load is being recorded to Redis.

Let's confirm this by running a report.

Let's open up another terminal (leaving the Fuge shell open to keep our system alive) and execute the following:

$ cd micro/report
$ node index.js

Output similar to the following should be displayed:

..................Content has been hidden....................

You can't read all the pages of the ebook; please click here to log in and view all pages.
Reset