Homelab, Linux & ABAP (~˘▾˘)~
 

[nodejs] Create buffer from stream

Using a promise

const stream2Buffer = (stream) => {
        return new Promise((resolve, reject) => {
                const chunks = []
                stream.on('data', chunk => chunks.push(chunk))
                stream.on('end', () => resolve(Buffer.concat(chunks)))
                stream.on('error', err => reject(err))
        })
}
const buffer = await stream2Buffer(stream)

A readable stream is also an async iterable, so you can also use for await...of:

        const chunks = []
        for await (const chunk of stream) {
            chunks.push(chunk)
        }
        const buffer = Buffer.concat(chunks)

[nodejs] Parsing multipart/mixed response (containing a file stream)

Recently I had to consume an API which returned multipart/mixed data. A response looked like this:

--Boundary_0000000000001
Content-Type: application/octet-stream
Content-Disposition: attachment; filename="test.pdf"

%PDF-1.7
%�������
1 0 obj
...
%%EOF

--Boundary_0000000000001
Content-Type: application/json

{"data":[]}
--Boundary_0000000000001--

There are some Node packages for parsing multipart responses, but most can only handle multipart/form-data and not multipart/mixed. The most recommended package for multipart/mixed is Dicer, but to be honest, I wasn’t sure how to use it properly. Therefore, I built my own parser. Luckily, the user idbehold had already shared a function that parses a response string into a JSON object. To get it working, I just had to change the regular expressions in the split function. The most important step is to convert the response buffer to a string in binary encoding before parsing.

Also, I wrote two helper functions: the first parses the boundary string from the Content-Type header, and the second parses the filename from the Content-Disposition header of the response.

module.exports = new class multipartMixedParser {

    parse(boundary, buffer) {
        const body = buffer.toString('binary') // toString() defaults to utf-8, which would corrupt binary data like PDFs
        return body.split(boundary).reduce((parts, part) => {
            if (part && part !== '--\r\n') {
                const [head, body] = part.trim().split(/\r\n\r\n/g)
                console.log({ body })
                parts.push({
                    body: body,
                    headers: head.split(/\r\n/g).reduce((headers, header) => {
                        const [key, value] = header.split(/:\s+/)
                        headers[key.toLowerCase()] = value
                        return headers
                    }, {})
                })
            }
            return parts
        }, [])
    }

    getBoundaryFromResponseHeaders(headers) {
        //example: multipart/mixed;boundary=Boundary_0000000000001 -> --Boundary_0000000000001
        const contentType = headers.get('content-type')
        return '--' + contentType.split("=")[1].split(";")[0]
    }

    getFileNameFromContentDisposition(cd) {
        //example: 'attachment; filename="example.pdf"' -> example.pdf
        return cd.slice(
            cd.indexOf('"') + 1,
            cd.lastIndexOf('"')
        )
    }

}

And that’s how I’m calling the API and using the multipartMixedParser class to parse the response. The API I was using expects a file as form data and also returns a file (as part of the multipart/mixed response).
It’s important to get the raw buffer from the response. If you used response.text() instead, it would convert the data to a UTF-8 encoded string, which would lead to corrupted files.

Please note, I’m using node-fetch. When using Axios, the response object will look different.

const btoa = require('btoa')
const FormData = require('form-data')
const fetch = require('node-fetch')
const multipartMixedParser = require('./multipartMixedParser') 

async function callAPI(file) {

        const form = new FormData()
        form.append('file', file.content, {
            contentType: file.mediaType,
            filename: file.fileName
        })

        const headers = {
            'Authorization': 'Basic ' + btoa(username + ':' + password),
            ...form.getHeaders()
        }

        const url = '/my/api/path'

        try {
            const response = await fetch(url, {
                method: 'POST',
                headers: headers,
                body: form
            })
            if (!response.ok) throw new Error(response.statusText)

            //parse the response
            const buffer = await response.buffer() 
            const boundary = multipartMixedParser.getBoundaryFromResponseHeaders(response.headers)

            const result = multipartMixedParser.parse(boundary, buffer)

            // in my case I only returned the file content as buffer and filename 
            return {
                fileContent: Buffer.from(result[0].body, 'binary'),
                fileName: multipartMixedParser.getFileNameFromContentDisposition(result[0].headers["content-disposition"])
            }
        } catch (err) {
            console.log("Error message: " + err.message)
        }

}
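
For completeness, a quick usage sketch (made up for illustration, not part of the original code): it reads a local PDF, passes it to callAPI() in the { content, mediaType, fileName } shape the function expects, and writes the returned file back to disk.

const fs = require('fs')

const file = {
    content: fs.readFileSync('PDFs/myFile.pdf'), // Buffer with the raw file content
    mediaType: 'application/pdf',
    fileName: 'myFile.pdf'
}

callAPI(file).then(result => {
    // callAPI resolves to undefined if an error was caught inside
    if (result) fs.writeFileSync('PDFs/' + result.fileName, result.fileContent)
})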

[nodejs] iterate through fetch response headers

https://github.github.io/fetch/#Headers

        const response = await fetch("https://example.com/api")

        for (const [key, value] of response.headers) {
            console.log(key, value)
        }

An alternative would be forEach()

        response.headers.forEach((value, key) => {
            console.log(value, key)
        })

Or using the entries() iterator

          const headerIterator = response.headers.entries()
          console.log(headerIterator.next().value)
          console.log(headerIterator.next().value)

To add or overwrite a header, use set() on the headers object

response.headers.set(key, value)
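
If you need to build headers for a request, set() works the same way on a standalone Headers object. A small sketch with made-up values, assuming an environment where fetch and Headers are globally available (browser or Node 18+); otherwise both can be imported from node-fetch:

const headers = new Headers()
headers.set('Accept', 'application/json')
headers.set('X-Custom-Header', 'some value') // set() overwrites the value if the header already exists

const response = await fetch("https://example.com/api", { headers })
console.log(response.status)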

[Postman] Visualize base64 image

If you have a service which returns a payload like the following (including a base64 encoded jpeg) you can display it directly in postman.

{
        "photo": "/9j/4AAQSkZJRgABAgAAAQABAAD/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsK\r\nCwsND..............",
        "photoId": "192",
        "mimeType": "image/jpeg"
}

This can be done with a few lines of code. In Postman, navigate to the “Tests” tab and insert the following lines:

//output to postman console
console.log("PhotoId: " + pm.response.json()["photoId"]);
console.log("Base64: " + pm.response.json()["photo"]);

//output in visualize tab
let template = `<img src='{{img}}'/>`;

pm.visualizer.set(template, { 
    img: `data:image/jpeg;base64,${pm.response.json()["photo"]}`
});

In the “Visualize” tab, you should now find your image.

[nodejs] read and write a file

https://nodejs.dev/learn/reading-files-with-nodejs

https://nodejs.dev/learn/writing-files-with-nodejs

        const fs = require("fs")

        try {
            // read from local folder
            const localPDF = fs.readFileSync('PDFs/myFile.pdf')

            //write back to local folder
            fs.writeFileSync('PDFs/writtenBack.pdf', localPDF )

        } catch (err) {
            console.error(err)
        }

Converting to Base64

        try {
            // read from local folder
            const localPDF = fs.readFileSync('PDFs/myFile.pdf')
            const localBase64 = localPDF.toString('base64')

            //write back to local folder
            fs.writeFileSync(`PDFs/writtenBack.pdf`, localBase64, {encoding: 'base64'})

        } catch (err) {
            console.error(err)
        }

Reading and writing using streams with pipe

        //read and write local file
        const reader = fs.createReadStream("PDFs/myFile.pdf")
        const writer = fs.createWriteStream('PDFs/writtenBack.pdf');
        reader.pipe(writer)
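
A side note, not part of the original example: pipe() by itself doesn’t forward errors from the read stream, so if you also want error handling, a minimal sketch using stream.pipeline could look like this (assuming Node 15+ for the stream/promises module):

        // read and write a local file with error handling via stream.pipeline
        const fs = require("fs")
        const { pipeline } = require("stream/promises")

        async function copyPdf() {
            try {
                await pipeline(
                    fs.createReadStream("PDFs/myFile.pdf"),
                    fs.createWriteStream("PDFs/writtenBack.pdf")
                )
                console.log("copy finished")
            } catch (err) {
                console.error(err)
            }
        }

        copyPdf()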

[nodejs] APIs and Microservices Projects

These are my notes while doing the course APIs and Microservices on https://www.freecodecamp.org. I highly recommend it if you prefer to try things directly rather than watching videos.


Timestamp Microservice

https://repl.it/@nocin/boilerplate-project-timestamp#server.js

app.get("/api/timestamp/", (req, res) => {
  res.json({ unix: Date.now(), utc: Date() });
});

app.get("/api/timestamp/:date?", (req, res) => {

  //utc date?
  let date = new Date(req.params.date)
  if (date != "Invalid Date") {
    return res.json({unix: date.getTime(), utc: date.toUTCString()});
  }

  //unix timestamp?
  const dateInt = parseInt(req.params.date);
  date = new Date(dateInt).toUTCString();
  if (date != "Invalid Date") {
    return res.json({unix: dateInt, utc: date});
  }
  
  //invalid input
  res.json({ error: date });
});


Request Header Parser Microservice

https://repl.it/@nocin/boilerplate-project-headerparser#server.js

https://www.npmjs.com/package/express-useragent
https://www.npmjs.com/package/express-request-language

var useragent = require('express-useragent');
var cookieParser = require('cookie-parser');
var requestLanguage = require('express-request-language');

// stuff...

app.use(useragent.express());
app.use(cookieParser());
app.use(requestLanguage({
  languages: ['en-US', 'zh-CN'],
  cookie: {
    name: 'language',
    options: { maxAge: 24*3600*1000 },
    url: '/languages/{language}'
  }
}));

app.get("/api/whoami", (req, res) => {
  res.json({"ipaddress": req.ip,
            "language": req.language,
            "software": req.useragent.source });
});


URL Shortener Microservice

https://repl.it/@nocin/boilerplate-project-urlshortener#server.js

require('dotenv').config();
const express = require('express');
const cors = require('cors');
const app = express();
const bodyParser = require('body-parser');
const dns = require('dns');


// Basic Configuration
const port = process.env.PORT || 3000;

app.use(cors());

app.use('/public', express.static(`${process.cwd()}/public`));

app.get('/', function(req, res) {
  res.sendFile(process.cwd() + '/views/index.html');
});

app.use(bodyParser.urlencoded({extended: false}));

let urls = [];

//POST
app.post("/api/shorturl/new", function(req, res) {
  
  const getHostnameFromRegex = (url) => {
    // run against regex
    const matches = url.match(/^https?\:\/\/([^\/?#]+)(?:[\/?#]|$)/i);
    // extract hostname (will be null if no match is found)
    return matches && matches[1];
  }

  const hostname = getHostnameFromRegex(req.body.url);
  console.log("Hostname: " + hostname);

  // if no hostname found, return here
  if (!hostname) return res.json({ error: 'invalid url' });

  // check if url is valid
  dns.lookup(hostname, (error, addresses) => {
    console.error(error);
    console.log(addresses);

    if (!error) {
      let newUrl = { original_url : req.body.url, short_url : urls.length + 1};
      urls.push(newUrl);
      res.json(newUrl);
    } else {
      res.json({ error: 'invalid url' });
    }

  });

});

//GET
app.get('/api/shorturl/:num', function(req, res) {

  for (let i = 0; i < urls.length; i++) {
    console.log(urls[i].original_url);
    if (urls[i].short_url == req.params.num) {
        return res.redirect(urls[i].original_url);
    }
  }

  // no matching short url found
  res.json({ error: 'invalid url' });

});

app.listen(port, function() {
  console.log(`Listening on port ${port}`);
});


Exercise Tracker

https://repl.it/@nocin/boilerplate-project-exercisetracker#server.js

const express = require('express')
const app = express()
const cors = require('cors')
require('dotenv').config()
const bodyParser = require('body-parser');
const mongoose = require('mongoose');

app.use(cors())
app.use(express.static('public'))
app.get('/', (req, res) => {
    res.sendFile(__dirname + '/views/index.html')
});


const listener = app.listen(process.env.PORT || 3000, () => {
    console.log('Your app is listening on port ' + listener.address().port)
})


//BodyParser
app.use(bodyParser.urlencoded({ extended: false }));

//DB connect
mongoose.connect(process.env.MONGO_URI, { useNewUrlParser: true, useUnifiedTopology: true });

const { Schema } = mongoose;

//User Schema
const userSchema = new Schema({
    username: { type: String, required: true },
});
const User = mongoose.model("User", userSchema);

//Exercise Schema
const exerciseSchema = new Schema({
    userId: Schema.Types.ObjectId,
    description: { type: String, required: true },
    duration: { type: Number, required: true },
    date: { type: Date, default: Date.now }
});
const Exercise = mongoose.model("Exercise", exerciseSchema);


//POST user to DB
app.post("/api/exercise/new-user", (req, res) => {

    let user = new User({ username: req.body.username });

    user.save((err, data) => {
        //console.log("created User: " + data);
        if (err) return console.error(err);
        res.json({ username: data.username, _id: data._id });
    });

});


//GET all users from DB
app.get("/api/exercise/users", (req, res) => {
    User.find((err, usersFound) => {
        if (err) return console.error(err);
        //console.error("users found: " + usersFound);
        res.json(usersFound);
    })
});


//POST exercise form data
app.post("/api/exercise/add", (req, res) => {

    let exercise = new Exercise({
        userId: req.body.userId,
        description: req.body.description,
        duration: req.body.duration,
        date: req.body.date ? req.body.date : Date.now()
    });

    exercise.save((err, data) => {
        //console.log("created exercise: " + data);
        if (err) return console.error(err);
        User.findById(exercise.userId, (err, userFound) => {
            if (err) return console.error(err);
            //console.log("userFound " + userFound.username); 
            res.json({
                _id: data.userId,
                username: userFound.username,
                date: data.date.toDateString(),
                duration: data.duration,
                description: data.description
            });
        });
    });
});


//GET exercise log
app.get("/api/exercise/log", (req, res) => {
    console.log(req.query.userId);
    console.log(req.query.from);
    console.log(req.query.to);
    console.log(req.query.limit);

    let userId = req.query.userId;
    let limit = Number(req.query.limit);

    //create query filter
    let filter = {};
    filter.userId = userId;

    if (req.query.from && req.query.to) {
        let fromDate = new Date(req.query.from);
        let toDate = new Date(req.query.to);
        filter.date = { $gte: fromDate, $lte: toDate };
    }

    console.log("Filter " + JSON.stringify(filter));

    const queryExercises = (done) => {
        Exercise.find(filter)
            .limit(limit)
            .exec((err, exercices) => {
                if (err) return console.error(err);
                done(exercices);
            })
    };

    const parseExercises = (exercices) => {
        let logArray = [];

        for (let i = 0; i < exercices.length; i++) {
            var obj = exercices[i];
            logArray.push({
                description: obj.description,
                duration: obj.duration,
                date: obj.date.toDateString()
            });
        }
        console.log(logArray);

        User.findById(userId, (err, userFound) => {
            if (err) return console.error(err);
            let logger = {
                _id: userId,
                username: userFound.username,
                count: logArray.length,
                log: logArray
            };
            res.json(logger);
        });
    }

    //Execute Query
    queryExercises(parseExercises);

});

File Metadata Microservice

https://repl.it/@nocin/boilerplate-project-filemetadata#server.js

https://www.npmjs.com/package/multer

var express = require('express');
var cors = require('cors');
require('dotenv').config()
var multer  = require('multer')
var upload = multer({ dest: 'uploads/' });

var app = express();

app.use(cors());
app.use('/public', express.static(process.cwd() + '/public'));

app.get('/', function (req, res) {
    res.sendFile(process.cwd() + '/views/index.html');
});


const port = process.env.PORT || 3000;
app.listen(port, function () {
  console.log('Your app is listening on port ' + port)
});


//POST 
app.post('/api/fileanalyse', upload.single('upfile'), (req, res, next) => {
  res.json({ name: req.file.originalname, type: req.file.mimetype, size: req.file.size  });
})

[nodejs] MongoDB and Mongoose Challenges

These are my notes while doing the course APIs and Microservices on https://www.freecodecamp.org. I highly recommend it if you prefer to try things directly rather than watching videos.


MongoDB is a database that stores data records (documents) for use by an application. Mongo is a non-relational, “NoSQL” database. This means Mongo stores all associated data within one record, instead of storing it across many preset tables as in a SQL database.
Mongo’s use of JSON as its document storage structure makes it a logical choice when learning backend JavaScript. Accessing documents and their properties is like accessing objects in JavaScript.
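
For example, a person and all of their associated data can live in a single document, and its fields are read like the properties of a plain JavaScript object. The document below is made up for illustration and just reuses the sample data from the exercises further down:

// illustrative sample document (not from the course material)
const person = {
    _id: "5f50c31e8ea1b20017e6a123", // added automatically by MongoDB
    name: "Max",
    age: 31,
    favoriteFoods: ["Pasta", "Pizza"] // nested data stays inside the same record
};

// accessed like any other JavaScript object
console.log(person.name);             // "Max"
console.log(person.favoriteFoods[0]); // "Pasta"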

Mongoose.js is an npm module for Node.js that allows you to write objects for Mongo as you would in JavaScript.

MongoDB Atlas is a MongoDB Database-as-a-Service platform.


Install and Set Up Mongoose

Add mongodb and mongoose to the project’s package.json.

    "dependencies": {
        "body-parser": "^1.15.2",
        "dotenv": "^8.2.0",
        "express": "^4.12.4",
        "mongodb": "^3.6.4",
        "mongoose": "^5.11.15"
    },

Store your MongoDB Atlas database URI in a private .env file as MONGO_URI. Replace user and password.

MONGO_URI=mongodb+srv://<User>:<Password>@cluster0.xvsqx.mongodb.net/<dbname>?retryWrites=true&w=majority

Connect to the database using the following syntax:

const mongoose = require('mongoose');
mongoose.connect(process.env.MONGO_URI, { useNewUrlParser: true, useUnifiedTopology: true });

Create a Model

Everything in Mongoose starts with a Schema. Each schema maps to a MongoDB collection and defines the shape of the documents within that collection.

const { Schema } = mongoose;

const personSchema = new Schema({
  name: { type: String, required: true },
  age: Number,
  favoriteFoods: [String]
});

const Person = mongoose.model("Person", personSchema);

Create and Save a Record of a Model

The done() function is a callback that tells us that we can proceed after completing an asynchronous operation such as inserting, searching, updating, or deleting. It follows the Node convention and should be called as done(null, data) on success, or done(err) on error.

const createAndSavePerson = (done) => {

  let max = new Person({name: "Max", age: 31, favoriteFoods: ["Pasta"]});

  max.save((err, data) => {
    if (err) return console.error(err);
    done(null, data)
  });

};

Create Many Records with model.create()

var arrayOfPeople = [
    {name: "Max", age: 31, favoriteFoods: ["Pasta"]},
    {name: "Toni", age: 21, favoriteFoods: ["Pizza"]},
    {name: "Paul", age: 34, favoriteFoods: ["Bolo", "Penne"]}
    ];

const createManyPeople = (arrayOfPeople, done) => {
  Person.create(arrayOfPeople, (err, people) => {
    if (err) return console.error(err);
    done(null, people);
  });
};

Use model.find() to Search Your Database

Model.find() accepts a query document (a JSON object) as the first argument, then a callback. It returns an array of matches.

const findPeopleByName = (personName, done) => {
  Person.find({name: personName}, (err, personFound) => {
    if (err) return console.error(err);
    done(null, personFound);
  })
};

Use model.findOne() to Return a Single Matching Document from Your Database

Model.findOne() behaves like Model.find(), but it returns only one document (not an array), even if there are multiple items.

const findOneByFood = (food, done) => {
  Person.findOne({favoriteFoods: food}, (err, personFound) => {
    if (err) return console.error(err);
    done(null, personFound);
  })
};

Use model.findById() to Search Your Database By _id

When saving a document, MongoDB automatically adds the field _id and sets it to a unique alphanumeric key.

const findPersonById = (personId, done) => {
  Person.findById({_id: personId}, (err, personFound) => {
    if (err) return console.error(err);
    done(null, personFound);
  })
};

Perform Classic Updates by Running Find, Edit, then Save

const findEditThenSave = (personId, done) => {
  const foodToAdd = 'hamburger';

  Person.findById(personId, (err, person) => {
    if(err) return console.log(err); 
  
    person.favoriteFoods.push(foodToAdd);
    person.save((err, updatedPerson) => {
      if(err) return console.log(err);
      done(null, updatedPerson)
    })
  })
};

Perform New Updates on a Document Using model.findOneAndUpdate()

Use the function parameter personName as the search key. Set the person’s age to 20.
Note: You should return the updated document. To do that, you need to pass the options document { new: true } as the 3rd argument to findOneAndUpdate(). By default, these methods return the unmodified object.

const findAndUpdate = (personName, done) => {
  const ageToSet = 20;

  Person.findOneAndUpdate({name: personName}, {age: ageToSet}, { new: true }, (err, updatedDoc) => {
    if (err) return console.error(err);
    done(null, updatedDoc)
  })
};

Delete One Document Using model.findByIdAndRemove()

const removeById = (personId, done) => {

  Person.findByIdAndRemove(personId, (err, personDeleted) => {
    if (err) return console.error(err);
    done(null, personDeleted)
  })
};

Delete Many Documents with model.remove()

const removeManyPeople = (done) => {
  const nameToRemove = "Mary";

  Person.remove({name: nameToRemove}, (err, personsDeleted) => {
    if (err) return console.error(err);
    done(null, personsDeleted)
  })
};

Chain Search Query Helpers to Narrow Search Results

const queryChain = (done) => {
  const foodToSearch = "burrito";

  Person.find({favoriteFoods: foodToSearch})
  .sort({name: 1}) //sort by name
  .limit(2) //only 2 results
  .select({age: 0})  //hide age
  .exec((err, twoPersonFound) => {
    if (err) return console.error(err);
    done(null, twoPersonFound)
  })
};