From 292ea98b123c4a4a6b36cdf0a8e14556cc5728f6 Mon Sep 17 00:00:00 2001 From: Kiryl Date: Wed, 8 Sep 2021 15:59:19 +0300 Subject: [PATCH] Add replace to work with Win --- src/epub_converter.py | 6 +++--- src/html_epub_preprocessor.py | 2 +- src/tmp.json | 1 + 3 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 src/tmp.json diff --git a/src/epub_converter.py b/src/epub_converter.py index 07dc28c..ba4ca0e 100644 --- a/src/epub_converter.py +++ b/src/epub_converter.py @@ -96,7 +96,7 @@ class EpubConverter: def _read_css(self, css_href, html_path): path_to_css_from_html = css_href html_folder = dirname(html_path) - path_to_css_from_root = normpath(join(html_folder, path_to_css_from_html)) + path_to_css_from_root = normpath(join(html_folder, path_to_css_from_html)).replace('\\', '/') css_obj = self.ebooklib_book.get_item_with_href(path_to_css_from_root) assert css_obj, f'Css style {css_href} was not in manifest.' css_content: str = css_obj.get_content().decode() @@ -238,7 +238,7 @@ class EpubConverter: def match_href_to_path_from_toc(self, href, href_in_link, internal_link_tag): dir_name = os.path.dirname(href) - normed_path = os.path.normpath(os.path.join(dir_name, href_in_link)) + normed_path = os.path.normpath(os.path.join(dir_name, href_in_link)).replace('\\', '/') full_path = [path for path in self.added_to_toc_hrefs if normed_path in path] if not full_path: self.logger.log(f'Error in {href} file. No {normed_path} file found in added to TOC documents. 
' @@ -413,7 +413,7 @@ if __name__ == "__main__": logger_object = BookLogger(name=f'epub', main_logger=logger, book_id=0) - json_converter = EpubConverter('/home/katerina/PycharmProjects/Jenia/converter/epub/9781119682387_pre_code2.epub', + json_converter = EpubConverter('../epub/calibri.epub', logger=logger_object) tmp = json_converter.convert_to_dict() diff --git a/src/html_epub_preprocessor.py b/src/html_epub_preprocessor.py index 417db7a..8689189 100644 --- a/src/html_epub_preprocessor.py +++ b/src/html_epub_preprocessor.py @@ -37,7 +37,7 @@ def update_src_links_in_images(body_tag: Tag, for img in img_tags: path_to_img_from_html = img.attrs.get('src') html_folder = os.path.dirname(path_to_html) - path_to_img_from_root = os.path.normpath(os.path.join(html_folder, path_to_img_from_html)) + path_to_img_from_root = os.path.normpath(os.path.join(html_folder, path_to_img_from_html)).replace('\\', '/') assert path_to_img_from_root in href2img_content, \ f'Image {path_to_img_from_html} in file {path_to_html} was not added to manifest.' diff --git a/src/tmp.json b/src/tmp.json new file mode 100644 index 0000000..6e4fae9 --- /dev/null +++ b/src/tmp.json @@ -0,0 +1 @@ +{"content": [{"title": "Title Page", "contents": ["  

Beginning API Development with Node.js
\n
\n

 

 

 

Build highly scalable, developer-friendly APIs for the modern web with JavaScript and Node.js

 

 

 

 

 

 

 

Anthony Nandaa

 

 

 

 

 

 

BIRMINGHAM - MUMBAI

"], "sub_items": []}, {"title": "Copyright and Credits", "contents": ["  "], "sub_items": [{"title": "Beginning API Development with Node.js", "contents": ["  \n 

 

Copyright © 2018 Packt Publishing

All rights reserved. No part of this book may be reproduced, stored in a retrieval system, or transmitted in any form or by any means, without the prior written permission of the publisher, except in the case of brief quotations embedded in critical articles or reviews.

Every effort has been made in the preparation of this book to ensure the accuracy of the information presented. However, the information contained in this book is sold without warranty, either express or implied. Neither the author, nor Packt Publishing or its dealers and distributors, will be held liable for any damages caused or alleged to have been caused directly or indirectly by this book.

Packt Publishing has endeavored to provide trademark information about all of the companies and products mentioned in this book by the appropriate use of capitals. However, Packt Publishing cannot guarantee the accuracy of this information.

 

 

 

Acquisitions Editor: Koushik Sen
\nContent Development Editors: Tanmayee Patil, Rutuja Yerunkar
\nProduction Coordinator: Ratan Pote

 

 

 

 

First published: July 2018

Production reference: 1230718

 

 

 

 

Published by Packt Publishing Ltd.
\nLivery Place
\n35 Livery Street
\nBirmingham
\nB3 2PB, UK.

ISBN 978-1-78953-966-0

www.packtpub.com

"], "sub_items": []}]}, {"title": "Packt Upsell", "contents": ["  

 

mapt.io

Mapt is an online digital library that gives you full access to over 5,000 books and videos, as well as industry leading tools to help you plan your personal development and advance your career. For more information, please visit our website.

"], "sub_items": [{"title": "Why Subscribe?", "contents": ["  \n "], "sub_items": []}, {"title": "PacktPub.com", "contents": ["  \n 

Did you know that Packt offers eBook versions of every book published, with PDF and ePub files available? You can upgrade to the eBook version at www.PacktPub.com and as a print book customer, you are entitled to a discount on the eBook copy. Get in touch with us at service@packtpub.com for more details.

At www.PacktPub.com, you can also read a collection of free technical articles, sign up for a range of free newsletters, and receive exclusive discounts and offers on Packt books and eBooks.

"], "sub_items": []}]}, {"title": "Contributors", "contents": ["  \n "], "sub_items": [{"title": "About the Author", "contents": ["  \n 

Anthony Nandaa is a senior software developer with more than 7 years of professional programming experience. He was introduced to programming 3 years before his career as a developer began, working with Pascal and VB 6. In his career so far, he has worked with multiple languages, such as Python, PHP, Go, and full-stack JavaScript.

In his current role, he leads a team of engineers working with Node.js and React for frontend development. He considers himself a lifelong learner, and lately, he has been learning Haskell for fun and to gain some insight into pure functional programming.

"], "sub_items": []}, {"title": "About the Reviewer", "contents": ["  \n 

Sam Anderson is an electronic engineer turned developer currently working for an award-winning creative digital agency based in Sheffield, England. Having moved from the world of hardware design, he is passionate about creating fast, beautiful, and efficient frontend applications. You can follow him on Twitter at @andomain.

 

 

 

"], "sub_items": []}, {"title": "Packt Is Searching for Authors like You", "contents": ["  \n 

If you're interested in becoming an author for Packt, please visit authors.packtpub.com and apply today. We have worked with thousands of developers and tech professionals, just like you, to help them share their insight with the global tech community. You can make a general application, apply for a specific hot topic that we are recruiting an author for, or submit your own idea.

"], "sub_items": []}]}, {"title": "Preface", "contents": ["   \n 

Using the same framework to build both server and client-side applications saves you time and money. This book teaches you how you can use JavaScript and Node.js to build highly scalable APIs that work well with lightweight cross-platform client applications. It begins with the basics of Node.js in the context of backend development, and quickly leads you through the creation of an example client that pairs up with a fully authenticated API implementation. 

This book balances theory and exercises, and contains multiple open-ended activities that use real-life business scenarios for you to practice and apply your newly acquired skills in a highly relevant context.

We have included over 20 practical activities and exercises across 9 topics to reinforce your learning. By the end of this book, you'll have the skills and exposure required to get hands-on with your own API development project.

"], "sub_items": [{"title": "Who This Book Is For", "contents": ["  \n 

This book is ideal for developers who already understand JavaScript and are looking for a quick no-frills introduction to API development with Node.js. Though prior experience with other server-side technologies such as Python, PHP, ASP.NET, Ruby will help, it's not essential to have a background in backend development before getting started.

"], "sub_items": []}, {"title": "What This Book Covers", "contents": ["  \n 

Chapter 1, Introduction to Node.js, covers a few fundamental concepts in Node.js, basic Node.js code and run it from the Terminal, module system, its categories, and asynchronous programming model that is at the heart of how Node.js works, and what actually makes Node.js tick.

Chapter 2, Building the API – Part 1, covers building a basic HTTP server, setting up Hapi.js, building basic API with Hapi.js Framework, and fundamental concepts of web applications.

Chapter 3, Building the API – Part 2, covers introduction to Knex.js and how we can use it to connect and use the database, essential CRUD database methods, API authentication using the JWT mechanism, CORS mechanism, testing the API using Lab library, and test automation using Gulp.js.

"], "sub_items": []}, {"title": "To Get the Most out of This Book", "contents": ["  \n 
    \n
  1. Prior experience with other server-side technologies, such as Python, PHP, ASP.NET, and Ruby will be beneficial but is not mandatory.
  2. \n
  3. This book will require a computer system. The minimum hardware requirements  are 1.8 GHz or higher Pentium 4 (or equivalent) processor, 4 GB RAM, 10 GB hard disk, and a stable internet connection.
  4. \n
  5. The software required are Visual Studio Code (https://code.visualstudio.com/), Node.js (8.9.1) (https://nodejs.org/en/), MySQL Workbench 6.3 (https://www.mysql.com/products/workbench/),  and MySQL (https://dev.mysql.com/downloads/mysql/).
  6. \n
"], "sub_items": [{"title": "Download the Example Code Files", "contents": ["  \n 

You can download the example code files for this book from your account at www.packtpub.com. If you purchased this book elsewhere, you can visit www.packtpub.com/support and register to have the files emailed directly to you.

You can download the code files by following these steps:

    \n
  1. Log in or register at www.packtpub.com.
  2. \n
  3. Select the SUPPORT tab.
  4. \n
  5. Click on Code Downloads & Errata.
  6. \n
  7. Enter the name of the book in the Search box and follow the onscreen instructions.
  8. \n

Once the file is downloaded, please make sure that you unzip or extract the folder using the latest version of:

The code bundle for the book is also hosted on GitHub at https://github.com/TrainingByPackt/BeginningAPIDevelopmentwithNode.js. In case there's an update to the code, it will be updated on the existing GitHub repository.

We also have other code bundles from our rich catalog of books and videos available at https://github.com/PacktPublishing/. Check them out!

"], "sub_items": []}, {"title": "Conventions Used", "contents": ["  \n 

There are a number of text conventions used throughout this book.

CodeInText: Indicates code words in text, database table names, folder names, filenames, file extensions, pathnames, dummy URLs, user input, and Twitter handles. Here is an example: \"After this setup is done, we then start the server using the server.start method.\"

A block of code is set as follows:

handler: (request, reply) => 



 

Any command-line input or output is written as follows:

node server.js

 

Bold: Indicates a new term, an important word, or words that you see onscreen. For example, words in menus or dialog boxes appear in the text like this. Here is an example: \"Change the request type to POST.\"

Activity: These are scenario-based activities that will let you practically apply what you've learned over the course of a complete section. They are typically in the context of a real-world problem or situation.


\nWarnings or important notes appear like this.

"], "sub_items": []}]}, {"title": "Get in Touch", "contents": ["  \n 

Feedback from our readers is always welcome.

General feedback: Email feedback@packtpub.com and mention the book title in the subject of your message. If you have questions about any aspect of this book, please email us at questions@packtpub.com.

Errata: Although we have taken every care to ensure the accuracy of our content, mistakes do happen. If you have found a mistake in this book, we would be grateful if you would report this to us. Please visit www.packtpub.com/submit-errata, selecting your book, clicking on the Errata Submission Form link, and entering the details.

Piracy: If you come across any illegal copies of our works in any form on the Internet, we would be grateful if you would provide us with the location address or website name. Please contact us at copyright@packtpub.com with a link to the material.

If you are interested in becoming an author: If there is a topic that you have expertise in and you are interested in either writing or contributing to a book, please visit authors.packtpub.com.

"], "sub_items": [{"title": "Reviews", "contents": ["  \n 

Please leave a review. Once you have read and used this book, why not leave a review on the site that you purchased it from? Potential readers can then see and use your unbiased opinion to make purchase decisions, we at Packt can understand what you think about our products, and our authors can see your feedback on their book. Thank you!

For more information about Packt, please visit packtpub.com.

"], "sub_items": []}]}]}, {"title": "Introduction to Node.js", "contents": ["  \n 

This chapter is designed to cover a few fundamental concepts in Node.js, as we lay a foundation for our subsequent chapters on API development.

Let's start this first chapter with a quick dive into how Node.js works and where it's being used lately. We will then have a look at its module system and its asynchronous programming model. Let's get started.

By the end of this chapter, you will be able to:

"], "sub_items": [{"title": "The Basics of Node.js", "contents": ["  \n 

Node.js is an event-driven, server-side JavaScript environment. Node.js runs JS using the V8 engine developed by Google for use in their Chrome web browser. Leveraging V8 allows Node.js to provide a server-side runtime environment that compiles and executes JS at lightning speeds.

Node.js runs as a single-threaded process that acts upon callbacks and never blocks on the main thread, making it high-performing for web applications. A callback is basically a function that is passed to another function so that it can be called once that function is done. We will look into this in a later topic. This is known as the single-threaded event loop model. Other web technologies mainly follow the multithreaded request-response architecture.

The following diagram depicts the architecture of Node.js. As you can see, it's mostly C++ wrapped by a JavaScript layer. We will not go over the details of each component, since that is out of the scope of this chapter.

Node's goal is to offer an easy and safe way to build high-performance and scalable network applications in JavaScript.

"], "sub_items": [{"title": "Applications of Node.js", "contents": ["  \n 

Node.js has the following four major applications:

"], "sub_items": []}, {"title": "Activity: Running Basic Node.js Code", "contents": ["  \n 

Before You Begin

Open the IDE and the Terminal to implement this solution.

Aim

Learn how to write a basic Node.js file and run it.

Scenario

You are writing a very basic mathematical library with handy mathematical functions.

Steps for Completion

    \n
  1. Create your project directory (folder), where all the code for this and other chapters will be kept. You can call it beginning-nodejs for brevity. Inside this directory, create another directory named lesson-1, and inside that, create another directory called activity-a. All this can be done using the following command:
  2. \n
mkdir -p beginning-nodejs/lesson-1/activity-a

 

    \n
  1. Inside activity-a, create a file using the touch math.js command.  
  2. \n
  3. Inside this file, create the following functions:\n
      \n
    • add: This takes any two numbers and returns the sum of both, for example, add(2, 5) returns 7
    • \n
    • sum: Unlike add, takes any number of numbers and returns their sum, for example, sum(10, 5, 6) returns 21
    • \n
    \n
  4. \n
  5.  After these functions, write the following code to act as tests for your code:
  6. \n
console.log(add(10, 6)); // 16

 

    \n
  1. Now, on the Terminal, change directory to lesson-1. That's where we will be running most of our code from for the whole chapter.
  2. \n
  3. To run the code, run the following command:
  4. \n
node activity-a/math.js

 

The 16 and 21 values should be printed out on the Terminal.

Even though you can configure the IDE so that Node.js code can be run at the click of a button, it's strongly recommended that you run the code from the Terminal to appreciate how Node.js actually works.
\nFor uniformity, if you are using a Windows machine, then run your commands from the Git Bash Terminal.
\n
\nFor the reference solution, use the math.js file at Code/Lesson-1/activity-solutions/activity-a.

"], "sub_items": []}]}, {"title": "The Module System", "contents": ["  \n 

Let's have a look at Node's module system and the different categories of the Node.js modules.

"], "sub_items": [{"title": "Application Modularization", "contents": ["  \n 

Like most programming languages, Node.js uses modules as a way of organizing code. The module system allows you to organize your code, hide information, and only expose the public interface of a component using module.exports.

Node.js uses the CommonJS specification for its module system:

Let's look at a simple example:

// math.js file















 

To call other functions such as mul and div, we'll use object destructuring as an alternative when requiring the module, for example, const { add } = require('./math');.
\n
\nThe code files for the section The Module System are placed at Code/Lesson-1/b-module-system.

"], "sub_items": []}, {"title": "Module Categories", "contents": ["  \n 

We can place Node.js modules into three categories:

"], "sub_items": [{"title": "Built-In Modules", "contents": ["  \n 

As mentioned earlier, these are modules that can be used straight-away without any further installation. All you need to do is to require them. There are quite a lot of them, but we will highlight a few that you are likely to come across when building web applications:

For example, the following code reads the content of the lesson-1/temp/sample.txt file using the in-built fs module:

const fs = require('fs');






 

The details of this code will be explained when we look at asynchronous programming later in this chapter.

"], "sub_items": []}, {"title": "npm – Third-Party Module Registry", "contents": ["  \n 

Node Package Manager (npm) is the package manager for JavaScript and the world's largest software registry, enabling developers to discover packages of reusable code.

To install an npm package, you only need to run the command npm install <package-name> within your project directory. We are going to use this a lot in the next two chapters.

Let's look at a simple example. If we wanted to use a package (library) like request in our project, we could run the following command on our Terminal, within our project directory:

npm install request

 

To use it in our code, we require it, like any other module:

const request = require('request');





 

More details about npm can be found here: https://docs.npmjs.com/. Recently, a new package manager was released called YARN (https://yarnpkg.com/), which is becoming increasingly popular.
\n
\nWhen you run the npm install <module-name> command on your project for the first time, the node_modules folder gets created at the root of your project.

"], "sub_items": []}, {"title": "Scanning for node_modules", "contents": ["  \n 

It's worth noting how Node.js goes about resolving a particular required module. For example, if a file /home/tony/projects/foo.js has a require call require('bar'), Node.js scans the filesystem for node_modules in the following order. The first bar.js that is found is returned:

Node.js looks for node_modules/bar in the current folder followed by every parent folder until it reaches the root of the filesystem tree for the current file.


\nThe module foo/index.js can be required as foo, without specifying index, and will be picked by default.

"], "sub_items": []}, {"title": "Handy npm Commands", "contents": ["  \n 

Let's dive a little deeper into npm, by looking at some of the handy npm commands that you will often use:

"], "sub_items": []}, {"title": "Local Modules", "contents": ["  \n 

We have already looked at how local modules are loaded from the previous example that had math.js and index.js.

Since JavaScript Object Notation (JSON) is such an important part of the web, Node.js has fully embraced it as a data format, even locally. You can load a JSON object from the local filesystem the same way you load a JavaScript module. During the module loading sequence, whenever a file.js is not found, Node.js looks for a file.json.

See the example files in lesson-1/b-module-system/1-basics/load-json.js:

const config = require('./config/sample');

 

Here, you will notice that once required, the JSON file is transformed into a JavaScript object implicitly. Other languages will have you read the file and perhaps use a different mechanism to convert the content into a data structure such as a map, a dictionary, and so on.

For local files, the extension is optional, but should there be a conflict, it might be necessary to specify the extension. For example, if we have both a sample.js and a sample.json file in the same folder, the .js file will be picked by default; it would be prudent to specify the extension, for example: const config = require('./config/sample.json');

\nWhen you run npm install, without specifying the module to install, npm will install the list of packages specified (under dependencies and devDependencies in the package.json file in your project). If package.json does not exist, it will give an error indicating that no such file has been found.

"], "sub_items": []}]}, {"title": "Activity: Using a Third-Party Package for the Previous math.js Code", "contents": ["  \n 

Before You Begin

This activity will build upon the, Running Basic Node.js activity of this chapter.

Aim

If the argument is a single array, sum up the numbers, and if it's more than one array, first combine the arrays into one before summing up. We will use the concat() function from lodash, which is a third-party package that we will install.

Scenario

We want to create a new function, sumArray, which can sum up numbers from one or more arrays.

Steps for Completion

    \n
  1. Inside Lesson-1, create another folder called activity-b.
  2. \n
  3. On the Terminal, change directory to activity-b and run the following command:
  4. \n
npm init

 

    \n
  1. This will take you to an interactive prompt; just press Enter all the way, leaving the answers as suggested defaults. The aim here is for us to get a package.json file, which will help us organize our installed packages.
  2. \n
  3. Since we will be using lodash, let's install it. Run the following command:
  4. \n
npm install lodash --save

 

Notice that we are adding the --save option on our command so that the package installed can be tracked in package.json. When you open the package.json file created in step 3, you will see an added dependencies key with the details.

    \n
  1. Create a math.js file in the activity-b directory and copy the math.js code from Activity, Running Basic Node.js into this file.
  2. \n
  3. Now, add the sumArray function right after the sum function.
  4. \n
  5. Start with requiring lodash, which we installed in step 4, since we are going to use it in the sumArray function:
  6. \n
const _ = require('lodash');

 

    \n
  1. The sumArray function should call the sum function to reuse our code. Hint: use the spread operator on the array. See the following code:
  2. \n
function sumArray() 











 

    \n
  1. At the end of the file, export the three functions, add, sum, and sumArray with module.exports.
  2. \n
  3. In the same activity-b folder, create a file, index.js.
  4. \n
  5. In index.js file, require ./math.js and go ahead to use sumArray:
  6. \n
// testing


 

    \n
  1. Run the following code on the Terminal:
  2. \n
node index.js

 

You should see 21 and 30 printed out.


\nThe solution files are placed at Code/Lesson-1/activitysolutions/activity-b.

"], "sub_items": []}]}, {"title": "Asynchronous Programming with Node.js", "contents": ["  \n 

Let's have a look at the asynchronous programming model that is at the heart of how Node.js works.

"], "sub_items": [{"title": "Callbacks", "contents": ["  \n 

Callbacks are functions that are executed asynchronously, or at a later time. Instead of the code reading top to bottom procedurally, asynchronous programs may execute different functions at different times based on the order and speed of earlier functions.

Since JavaScript treats functions like any other object, we can pass a function as an argument in another function and later execute that passed-in function or even return it to be executed later.

We saw such a function previously when we were looking at the fs module in The Module System section. Let's revisit it:

const fs = require('fs');






 


\nThe code files for Asynchronous Programming with Node.js are placed at Code/Lesson-1/c-async-programming.

On line 3, we use a variable that is part of the globals, __dirname, which basically gives us the absolute path of the directory (folder) in which our current file (read-file.js) is, from which we can access the temp/sample.txt file.

Our main point of discussion is the chunk of code between lines 5 and 8. Just like most of the methods you will come across in Node.js, they mostly take in a callback function as the last argument.

Most callback functions will take in two parameters, the first being the error object and the second, the results. For the preceding case, if file reading is successful, the error object, err, will be null and the contents of the file will be returned in the data object.

Let's break down this code for it to make more sense:

const fs = require('fs');







 

Now, let's look at the asynchronous part. Let's add an extra line to the preceding code:

const fs = require('fs');








 

See what we get as a print out:

Print out last!  hello,  world

 

How come Print out last! comes first? This is the whole essence of asynchronous programming. Node.js still runs on a single thread, line 10 executes in a non-blocking manner and moves on to the next line, which is console.log('Print out last!'). Since the previous line takes a long time, the next one will print first. Once the readFile process is done, it then prints out the content of the file through the callback.

"], "sub_items": []}, {"title": "Promises", "contents": ["  \n 

Promises are an alternative to callbacks for delivering the results of an asynchronous computation. First, let's look at the basic structure of promises, before we briefly look at the advantages of using promises over normal callbacks.

Let's rewrite the code above with promises:

const fs = require('fs');















 

This code can further be simplified by using the util.promisify function, which takes a function following the common Node.js callback style, that is, taking an (err, value) => … callback as the last argument and returning a version that returns promises:

const fs = require('fs');





 

From what we have seen so far, promises provide a standard way of handling asynchronous code, making it a little more readable.

What if you had 10 files, and you wanted to read all of them? Promise.all comes to the rescue. Promise.all is a handy function that enables you to run asynchronous functions in parallel. Its input is an array of promises; its output is a single promise that is fulfilled with an array of the results:

const fs = require('fs');
















 

"], "sub_items": []}, {"title": "Async/Await", "contents": ["  \n 

This is one of the latest additions to Node.js, having been added early in 2017 with version 7.6, providing an even better way of writing asynchronous code, making it look and behave a little more like synchronous code.

Going back to our file reading example, say you wanted to get the contents of two files and concatenate them in order. This is how you can achieve that with async/await:

const fs = require('fs');









 

In summary, any asynchronous function that returns a promise can be awaited.

"], "sub_items": []}, {"title": "Activity: Transforming a Text File Using an Async Function", "contents": ["  \n 

Before You Begin

You should have already gone through the previous activities.

Aim

Read the file (using fs.readFile), in-file.txt, properly case format the names (using the lodash function, startCase), then sort the names in alphabetical order and write them out to a separate file out-file.txt (using fs.writeFile).

Scenario

We have a file, in-file.txt, containing a list of peoples' names. Some of the names have not been properly case formatted, for example, john doe should be changed to John Doe.

Steps for Completion

    \n
  1. \nIn Lesson-1, create another folder called activity-c.\n
  2. \n
  3. On the Terminal, change directory to activity-c and run the following command:
  4. \n
npm init

 

    \n
  1. Just like in the previous activity, this will take you to an interactive prompt; just press Enter all the way, leaving the answers as suggested defaults. The aim here is for us to get a package.json file, which will help us organize our installed packages.
  2. \n
  3. Since we will be using lodash here too, let's install it. Run, npm install lodash --save.
  4. \n
  5. Copy the in-file.txt file provided in the student-files directory into your activity-c directory.
  6. \n
  7. In your activity-c directory, create a file called index.js, where you will write your code.
  8. \n
  9. Now, go ahead and implement an async function transformFile, which will take the path to a file as an argument, transform it as described previously (under Aim), and write the output to an output file provided as a second parameter.
  10. \n
  11. On the Terminal, you should indicate when you are reading, writing, and done, for example:\n
      \n
    • reading file: in-file.txt
    • \n
    • writing file: out-file.txt
    • \n
    • done
    • \n
    \n
  12. \n

You should read the quick reference documentation on fs.writeFile since we haven't used it yet. However, you should be able to see its similarity with fs.readFile, and convert it into a promise function, as we did previously.

The solution files are placed at Code/Lesson-1/activitysolutions/activity-c.

"], "sub_items": []}]}, {"title": "Summary", "contents": ["  \n 

In this chapter, we went through a quick overview of Node.js, seeing how it looks under the hood.

We wrote basic Node.js code and ran it from the Terminal using the Node.js command.

We also looked at the module system of Node.js, where we learnt about the three categories of Node.js modules, that is, in-built, third-party (installed from the npm registry), and local modules, and their examples. We also looked at how Node.js resolves a module name whenever you require it, by searching in the various directories.

We then finished off by looking at the asynchronous programming model that is at the heart of how Node.js works, and what actually makes Node.js tick. We looked at the three main ways you can write asynchronous code: using callbacks, Promises, and the
\nnew async/await paradigm.

The foundation is now laid for us to go ahead and implement our API using Node.js. Most of these concepts will crop up again as we build our API.

"], "sub_items": []}]}, {"title": "Building the API - Part 1", "contents": ["  \n 

This chapter is meant to introduce the students to API building using Node.js. We will start by building a basic HTTP server to gain an understanding of how Node.js works.

By the end of this chapter, you will be able to:

"], "sub_items": [{"title": "Building a Basic HTTP Server", "contents": ["  \n 

Let's begin by looking at the basic building blocks of a Node.js web application. The built-in http module is the core of this. However, from the following example, you will also appreciate how basic this can be.

Save the following code in a file called simple-server.js:

const http = require('http');









 


\nUse the simple-server.js file for your reference at Code/Lesson-2.

Now, let's run the file:

node simple-server.js

 

When we go to the browser and visit the URL in the example, this is what we get:

"], "sub_items": []}, {"title": "Setting up Hapi.js", "contents": ["  \n 

Hapi.js (HTTP API), is a rich framework for building applications and services, focusing on writing reusable application logic. There are a number of other frameworks; notable among them is Express.js. However, from the ground up, Hapi.js is optimized for API building, and we will see this shortly when building our application.

"], "sub_items": [{"title": "Exercise 1: Building a Basic Hapi.js Server", "contents": ["  \n 

In this exercise, we're going to build a basic HTTP server like the one before, but now with Hapi.js. You will notice how most of the things are done for us under the hood with Hapi.js. However, Hapi.js is also built on top of the http module.

For the rest of the exercises, from the first exercise of Chapter 3, Building the API – Part 2, we will be building on top of each exercise as we progress. So, we might need to go back and modify previous files and so forth:

    \n
  1. In your Lesson-2 folder, create a subfolder called hello-hapi.
  2. \n


\nUse the exercise-b1 folder for your reference at Code/Lesson-2.

    \n
  1. On the Terminal, change directory to the root of the hello-hapi folder.
  2. \n
  3. Initialize it as a basic Node.js project and run the following command:
  4. \n
npm init -y

 

    \n
  1. Create a file, server.js.
  2. \n
  3. Install Hapi.js by executing the following command:
  4. \n
npm install hapi --save

 

    \n
  1. In the file, write the following code:
  2. \n
const Hapi = require('hapi');













 


\nUse the server.js file for your reference at Code/Lesson-2/exercise-b1.

Let us try to understand the code:


\nRecall our subtopic, The Module System, in Chapter 1, Introduction to Node.js? We looked at third-party modules—this is one of them.


\nWe're going to look at another extra key, called config, in our main project.

    \n
  1. Run the server by going to the Terminal, and run the following command:
  2. \n
node server.js

 

    \n
  1. You should see this printed on the Terminal:
  2. \n
Server running at: http://localhost:8000

 

You should see something similar to this at http://localhost:8000:

Open another Terminal, change directory to the same project folder, and run the same command, node server.js. We'll get this error: Error: listen EADDRINUSE 127.0.0.1:8000.
\n
\nThe reason we get this error is because we can only have one server running on a particular port of our host. Remember that the host IP 127.0.0.1 is what we refer to as localhost. if (err) throw err; is the line that throws the error.
\n
\nWe can fix this by changing the port number of our second server to something like 8001. However, as best practice, other than keep changing the code, we can pass the port number as a Terminal argument, that is, running the app as, node server.js <port-number>, then changing our code (in the port section) to, port: process.argv[2] || 8000,.

Here, we're saying, if the port is provided as the first argument of the script, use that, otherwise, use 8000 as the port number. Now, when you run: node server.js 8002, the server should run okay from localhost:8002.

For the process.argv array, index 0 is the program running the script, node and index 1 is the script being run, server.js. Arguments passed to the script are therefore counted from index 2 onwards. You can read more about process.argv here later on.

"], "sub_items": []}, {"title": "Using an API Client", "contents": ["  \n 

For us to utilize the client to the fullest, to be able to do all the request types (GET, POST, UPDATE, and so on), we will need to have an API client. There are a number out there, but we recommend either Postman (https://www.getpostman.com/) or Insomnia (https://insomnia.rest/). For our examples, we will be using Insomnia.

After installing Insomnia, add a GET request to http://localhost:8000:

    \n
  1. We will begin by creating a request page for Insomnia, where we will be making all of our requests:
  2. \n

Enter a name for the new request:

    \n
  1. We will then make our request by typing the route and clicking on Send:
  2. \n

When we change the type from

GET

to

POST

, and click on

Send

, we get a 404 error because, on our server, we currently have only the GET method defined for route

/

.

"], "sub_items": []}, {"title": "Returning JSON Strings", "contents": ["  \n 

As we are now building our API, we need a formal way of representing our data in our request, by sending or receiving it. JavaScript Object Notation (JSON) is the conventional data-interchange format for REST APIs.

One thing to note about JSON is that it started from JavaScript and is now widely adopted across other languages. So, when it comes to Node.js, you will see how using JSON becomes so easy and natural.

"], "sub_items": []}, {"title": "Exercise 2: Returning JSON", "contents": ["  \n 
    \n
  1. Let's go back to our server.js file from Exercise 1, Building a Basic Hapi.js Server.
  2. \n


\nUse the exercise-b2 folder for your reference at Code/Lesson-2.

    \n
  1. To return JSON for our / route, all we need to change is our returned string to an object:
  2. \n
handler: (request, reply) => 



 

    \n
  1. Stop the server by going to the Terminal where the server is running and pressing Ctrl + C. Then, start the server again to effect the changes by running the following command:
  2. \n
node server.js

 

    \n
  1. Now go back to Insomnia and do another GET request. You can see that this is effectively changed into a JSON string:
  2. \n
{


 

This comes out-of-the-box in Hapi.js, while with some frameworks, such as Express.js, you have to use a json function to do the conversion.

"], "sub_items": []}, {"title": "Using nodemon for Development Workflow", "contents": ["  \n 

You will have noticed that, after making the changes in first exercise, we had to go back and stop the server and start over again. Doing this every time you make a change to your code becomes very cumbersome. Luckily, tooling comes to our rescue.

There is a Node.js package called nodemon, which can help restart the server automatically whenever there is a change in our files.

"], "sub_items": []}, {"title": "Exercise 3: Using nodemon", "contents": ["  \n 

In this exercise, we're going to introduce a Node module known as nodemon, which we will be using to run our web server. This makes it possible for the server to automatically reload when we make changes to it, therefore avoiding the tediousness of stopping the server and starting it over again manually whenever we make changes to our server:

    \n
  1. Go back to the Terminal and stop the server (press Ctrl + C), then run the following command.
  2. \n
  3. We will need to install this package globally (remember that you might need some administrative rights, so in Unix systems, you need to run the command as sudo):
  4. \n
npm install --global nodemon

 

    \n
  1. Once installation is complete, we can run with nodemon:
  2. \n
nodemon server.js

 

You should get something like this:

[nodemon] 1.12.1[nodemon] to restart at any time, enter `rs`[nodemon] watching: *.*[nodemon] starting `node server.js`Server running at: http://localhost:8000

 

"], "sub_items": []}, {"title": "Setting up the Logger", "contents": ["  \n 

Logging is a very important component of any web application. We need a way of preserving the history of the server so that we can come back any time and see how it was serving requests.

And, most of all, you don't want logging to be an afterthought, only being implemented after you come across a production bug that makes your web app crash when you are trying to figure out where the problem is exactly.

Hapi.js has a minimal logging functionality built in, but if you need an extensive one, a good example is called good (https://github.com/hapijs/good).

"], "sub_items": []}, {"title": "Exercise 4: Setting up the Logger", "contents": ["  \n 

In this exercise, we're going to add a logging mechanism on the web server we have created, so that each request and server activity can be easily tracked through the logs:

    \n
  1. Let's go back to our project from Exercise 2: Returning JSON.
  2. \n


\nUse the exercise-b4 folder for your reference at Code/Lesson-2.

    \n
  1. We first need to install a couple of packages that will help with our logging (good and good-console). Run the following command:
  2. \n
npm install --save good good-console

 

good-console is what we call a write stream. There are other write streams that work with good, but, for simplicity, we won't look at them. You can check https://github.com/hapijs/good for more information.

    \n
  1. We will then modify our server.js code to configure our logging. First, by requiring good just after Hapi.js:
  2. \n
const Hapi = require('hapi');

 

    \n
  1. Then, registering it with the server just before we start the server:
  2. \n
// set up logging










 


\nUse the server.js file for your reference at Code/Lesson-2/exercise-b4.

    \n
  1. If you are still running the server with nodemon, by now, you will start seeing the server logs being updated periodically on the Terminal; something similar to:
  2. \n
171102/012027.934, [ops] memory: 34Mb, uptime (seconds):100.387, load: [1.94580078125,1.740234375,1.72021484375]171102/012207.935, [ops] memory: 35Mb, uptime (seconds):200.389, load: [2.515625,2.029296875,1.83544921875]...

 

    \n
  1. Now, go back to Insomnia and try to do another GET request on localhost:8000/. You will see an extra log has been created showing the time the request was made (timestamp), the route, the method (get), the status code (200), and the time taken for the request:
  2. \n
171102/012934.889, [response] http://localhost:8000: get /{} 200 (13ms)

 


\nThe time taken comes in very handy when you are trying to optimize the performance of your server, seeing which requests take longer than expected to be served.

"], "sub_items": []}]}, {"title": "Understanding Requests", "contents": ["  \n 

Let's have a look at the concept of request and the different HTTP request methods.

"], "sub_items": [{"title": "A Look at HTTP Request Methods", "contents": ["  \n 

Having set up our server, we are ready to start building our API. The routes are basically what constitute the actual API.

We will first look at HTTP request methods (sometimes referred to as HTTP verbs), then apply them to our API using a simple todo list example. We will look at five major ones:

In the following exercises, we're going to rewrite our previous code where we had hardcoded our data so that we can work with real and dynamic data coming directly from the database.

"], "sub_items": []}, {"title": "Exercise 5: Getting a List of Resources", "contents": ["  \n 
    \n
  1. Let's go back to the project from Exercise 4: Setting up the Logger.
  2. \n


\nUse the exercise-c1 folder for your reference at Code/Lesson-2.

    \n
  1. Since we are going to have various routes, it would be prudent to now split our routes to a separate file for the sake of organization. Within the project, create a subfolder called routes.
  2. \n
  3. Inside the created folder, create a file called todo.js. In todo.js, this is where we are going to have all our routes for the todo resource. This file (module) will export a list of routes.
  4. \n
  5. Let's start by doing a simple route that returns a list of todos on a GET request:
  6. \n
const todoList = [











 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. We then go back to our server.js file, require the todo route module, and register it with our server using the server.route method:
  2. \n
const routes = {};









 


\nUse the server.js file for your reference at Code/Lesson-2/exercise-c1.

    \n
  1. Using Insomnia, do a GET request to http://localhost:8000/todo. You should see this returned:
  2. \n

"], "sub_items": []}, {"title": "Exercise 6: Getting a Specific Resource", "contents": ["  \n 
    \n
  1. Now, let's try and get a specific todo. Since we don't have a database with IDs, we will take the indices to be IDs, [0] being 1, and so on.
  2. \n


\nUse the exercise-c1 folder for your reference at Code/Lesson-2.

    \n
  1. Let's add a route for that. Notice that we use {<parameter-key>} as a way of passing request parameters to our route function, then get it through request.params.id:
  2. \n
module.exports = [











 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. Go to Insomnia and do a GET request to http://localhost:8000/todo/1. You should see this:
  2. \n

"], "sub_items": []}, {"title": "Exercise 7: Creating a New Todo with POST", "contents": ["  \n 
    \n
  1. Now let's add a new todo. This is where POST comes in. A POST request should always come with a payload which is the data that is being posted. We will add a new route to handle this:
  2. \n
module.exports = [










 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. On Insomnia:\n
      \n
    1. Change the request type to POST:
    2. \n
    \n
  2. \n

    \n
  1. When you post the request, you should see this as the response:
  2. \n
{


 

    \n
  1. Now, when you do a GET request to http://localhost:8000/todo, you should see the newly created todo appear as part of the response:
  2. \n
[




 

    \"list\": 






 

"], "sub_items": []}, {"title": "Exercise 8: Updating a Resource with PUT", "contents": ["  \n 
    \n
  1. If we wanted to update, say, the first todo list, conventionally, PUT requires us to send the whole updated todo resource. Now let's create a PUT route:
  2. \n
{









 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. Now go to Insomnia and make the request. Remember to change the request type to PUT:
  2. \n

    \n
  1. You should see the following response:
  2. \n
{


 

    \n
  1. And when you do a GET on http://localhost:8000/todo/1, you should get the updated resource:
  2. \n

"], "sub_items": []}, {"title": "Exercise 9: Updating with PATCH", "contents": ["  \n 
    \n
  1. You will realize that, in our previous exercise, we had to post the whole resource just to change a part of it. A better way of doing this is using PATCH, so that the payload only contains what is required. Let's now create a PATCH route:
  2. \n
{













 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. Now, you can provide any of the keys and their values, and they will be updated respectively. For example, make the following request, only changing the title of the first todo:
  2. \n

    \n
  1. You should get the following response:
  2. \n
{


 

    \n
  1. And when you do a GET on http://localhost:8000/todo/1, you should get the updated resource:
  2. \n

"], "sub_items": []}, {"title": "Exercise 10: Deleting a Resource with DELETE", "contents": ["  \n 
    \n
  1. When we want to delete a resource, we use the DELETE method. Let's create a DELETE route:
  2. \n
{








 


\nUse the 
exercise-c1 folder for your reference at Code/Lesson-2.
    \n
  1. Now go to Insomnia and test it—you should get this response:
  2. \n

    \n
  1. Now try accessing the previously deleted resources—you should get a 404 error. However, in our previous GET route (in Exercise 6: Getting a Specific Resource), we did not cater for this, so let's go and make a modification to our GET: /todo/{id} route:
  2. \n
{










 

Use the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.
\n
\nDon't worry about the status code, 404, if you have never come across it. We are going to go through the major status codes in our last subsection of this section.

    \n
  1. Remember, the server will reload this time, therefore, the deleted resource will still be brought back, so go back and repeat step 2.
  2. \n
  3. When you now do a GET request to http://localhost:8000/todo/1, you should see this:
  4. \n
{


 

Short Closing Note on Insomnia

\nYou should be able to access all your previous requests under

History

. Click on the Time icon in the top-right corner.

"], "sub_items": []}, {"title": "Request Validation", "contents": ["  \n 

We will need to validate the incoming requests to make sure that they conform to what the server can handle.

This is one of the places I see Hapi.js shining above other frameworks. In Hapi.js, you hook in validation as a configuration object as part of the route object. For validation, we will use the Joi library, which works well with Hapi.js.

"], "sub_items": []}, {"title": "Exercise 11: Validating a Request", "contents": ["  \n 

In this exercise, we are going to see the concept of request validation in action. We will write a validation for one of the routes as an example, but the same could be applied across the other routes:

    \n
  1. For example, if we go back to the POST route from Exercise 1: Building a Basic Hapi.js Server, we can post an empty payload and still get status code 200! Clearly, we need a way of validating this.
  2. \n
  3. Let's start by installing Joi:
  4. \n
npm install joi --save

 


\nUse the exercise-c2 folder for your reference at Code/Lesson-2.

    \n
  1. In the routes/todo.js file, we need to require Joi and then modify our post route by adding a config.validate key to the route object:
  2. \n
{










 


\nUse the todo.js file for your reference at Code/Lesson-2/exercise-c1/routes.

    \n
  1. When we try to submit an empty payload, we now get error 400:
  2. \n

    \n
  1. That is, until we provide a title for the todo, since a title is required:
  2. \n

Joi is a full-fledged validation library with many options for how to use it. In this exercise, we just touched on a basic example.
\n
\nYou validate any part of the request by coming up with the respective key/value pair within the validate key and its respective type:
\n
\npayload (for request payloads, as in the preceding exercise), params (for request params), and query (for query params).
\n
\nFor example, for the request, GET: /todo/:id, if we want to validate that the ID is an integer, we will add this config object:
\nconfig: {
\n  validate:
\n    {

\n    params:
\n     {

\n      id: Joi.number()
\n    }
\n  }
\n}

More details on Joi can be found here: https://github.com/hapijs/joi.

"], "sub_items": []}]}, {"title": "Summary", "contents": ["  \n 

This chapter has covered the initial part of building our API with Node.js. We started by looking at a basic HTTP server built with only the built-in HTTP module, for us to appreciate the basic building blocks of a Node.js web application. We then introduced doing the same thing with the Hapi.js framework.

We then went through various HTTP verbs (request methods) by example as we built our basic API with Hapi.js. Those were GET, POST, PUT, PATCH, and DELETE.

We also covered some fundamental concepts of web applications, such as logging, using good and request validation, and using Joi.

"], "sub_items": []}]}, {"title": "Building the API - Part 2", "contents": ["  \n 

This chapter is intended to revisit the previous implementation, this time saving our data in a persistent storage (database). It will also cover authentication, and unit testing and hosting as additional good-to-know concepts (but not essential). It is therefore prudent to put more emphasis on working with the DB using knex.js and authenticating your API with JWT.

By the end of this chapter, you will be able to:

"], "sub_items": [{"title": "Working with the DB Using Knex.js", "contents": ["  \n 

In this section, we're going to go through the fundamental concepts of working with the database. We will continue with the step-by-step build-up from our previous todo project. You will have noticed that in our last project, we were storing our information in computer memory, and that it disappears immediately once our server restarts. In real life, you will want to store this data persistently for later access.

So, what is Knex.js? It is a SQL query-builder for relational databases like PostgreSQL, Microsoft SQL Server, MySQL, MariaDB, SQLite3, and Oracle. Basically, with something like Knex, you can write one code that will easily work with any of the mentioned databases, with no extra effort, just switching the configurations.

Let's walk through the exercise as we explain the concepts.

"], "sub_items": [{"title": "Exercise 12: Setting up the Database", "contents": ["  \n 

Let's go back to where we left off in the Exercise 11: Validating a Request of Chapter 2, Building the API – Part 1. In this example, we will be using MySQL as our database of choice. Make sure your machine is set up with MySQL and MySQL Workbench:


\nUse the Code/Lesson-3/exercise-a folder for your reference.

    \n
  1. Open MySQL Workbench. Click on the + button to create a connection:
  2. \n

    \n
  1. Add the connection name as packt, username as root, and the password (if any). Click on Test Connection to see if the connection is correct, then click on OK:
  2. \n

    \n
  1. Click on OK to create the connection.
  2. \n
    \n
  1. Now, click on the connection, packt:
  2. \n

    \n
  1. Create the todo database by running the following query, and click on the Execute icon:
  2. \n
CREATE DATABASE todo;

 

    \n
  1. The chapter files come with a basic SQL schema for our todo example project, almost similar to what we were using with the basic JavaScript array in the previous exercises:\n
      \n
    1. In the Code/Lesson-3 folder, there is a file called raw-sql.sql. Open the file with your code editor and copy the contents of the file.
    2. \n
    3. Then, go back to the MySQL Workbench.
    4. \n
    5. Paste what you copied from the file in the textbox and click on the Execute icon:
    6. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. You should see the list of the created tables (todo, todo_item, user) as below, when you click on the Refresh icon to the right of the SCHEMAS label, and click on Tables:
    2. \n
    \n
  2. \n

"], "sub_items": []}, {"title": "Exercise 13: Connecting to the Database", "contents": ["  \n 

Now that we have created our database, in this exercise we are going to connect our application to our database using the necessary npm packages, that is, knex and mysql:

    \n
  1. On the Terminal, change directory to the root of our project, and run the following command:
  2. \n
npm install mysql knex --save

 

    \n
  1. Let's create a file db.js and add the following code to it, replacing the user and password appropriately if need be:
  2. \n
const env = process.env.NODE_ENV || 'development';








 


\nYou can find the complete code from the db.js file at Code/Lesson-3/exercise-a.

    \n
  1. Let's test that we have our configurations right. We will create a test-db.js file:
  2. \n
const Knex = require('./db');



 

    \n
  1. Now, let's go to the Terminal and run the test file:
  2. \n
node test-db.js

 

You should get the following printed:

connected: 2

 

"], "sub_items": []}, {"title": "Exercise 14: Creating a Record", "contents": ["  \n 

In this exercise, we're going to write code for saving a todo and its items. To start off, let's create a dummy user since we will hardcode the user ID for our code. Later, in Exercise 19: Securing All the Routes, we will have the ID picked from the authentication details:

    \n
  1. Go back to MySQL Workbench.
  2. \n
  3. Clear the previous query and paste the following query, and click on the Execute icon:
  4. \n
USE todo;



 

    \n
  1. When you click on the user table, you should see the following; our newly created user has an ID of 1:
  2. \n

    \n
  1. Now, let's go to our routes file, /routes/todo.js and modify the code, for the POST: /todo route; change the code to be as follows (it's only the handler that is changing, notice the change to async function):\n
      \n
    1. Let's start by requiring our Knex instance that is in ./db.js. Just after the line requiring Joi, add this:
    2. \n
    \n
  2. \n
const Knex = require('../db');

 


\nNotice the two dots, ../db.js, since db.js is found in the parent folder. Recall our topic on requiring local modules in Chapter 1, Introduction to Node.js.

    \n
  1. \n
      \n
    1. Now, let's modify our handler for the POST: /todo route. Here, we are using the Knex.insert method, and adding an optional .returning method so that we get back the ID of the todo we have added:
    2. \n
    \n
  2. \n
{














 

You can find the complete code from the todo.js file at Code/Lesson-3/exercise-a/routes.
\nUnlike our previous exercises in Chapter 2, Building the API – Part 1, we will split our POST: /todo route into two, POST: /todo, for adding a todo list, and POST: /todo/<id>/item for adding items to the list.

    \n
  1. Now, let's test our newly created endpoint. If you had stopped your server, go back to the Terminal and start it again, with nodemon:
  2. \n
nodemon server.js

 

    \n
  1. Go to Insomnia and make the post request; you should get something like this (notice the todo_id returned, since we will use it in our next example):
  2. \n

    \n
  1. Now, let's add a route for adding todo items, POST: /todo/<id>/item; therefore, next to the previous route object, add this route object:
  2. \n
{











 


\nYou can find the complete code from the todo.js file at Code/Lesson-3/exercise-a/routes.

    \n
  1. Now, let's test the route, /todo/1/item, 1 being the ID of todo we created in step 6:
  2. \n

"], "sub_items": []}, {"title": "Exercise 15: Reading from the Database", "contents": ["  \n 

In this exercise, we're going to write the routes for:

We will use a number of Knex methods:

    \n
  1. To get a list of all todo, we will modify our previous GET: /todo route. Here, you only want to list todo items for a particular authenticated user. For now, we will be using our hardcoded test user:
  2. \n
{










 

    \n
  1. Let's modify the route for getting a single todo item, GET: /todo/<id>:
  2. \n
{











 


\nYou can find the complete code from the todo.js file at Code/Lesson-3/exercise-a/routes.

We are using array destructuring here too, since the result, if any, will be an array of length 1, so we're getting the first and only element from the array with: const [ todo ] = ...

    \n
  1. Now, let's add the route object for getting a list of items for a particular todo, preferably just after the route for adding a todo item that we did in Exercise 14: Creating a Record:
  2. \n
{










 

    \n
  1. Now, let's test the route:
  2. \n

"], "sub_items": []}, {"title": "Exercise 16: Updating a Record", "contents": ["  \n 

In this exercise, we're going to write routes for updating a todo title or a todo item, and here we will introduce a new Knex method, .update():

    \n
  1. Let's start by modifying our previous PATCH: /todo/<id> route. We have also added an extra validation to make sure that title is supplied as payload:
  2. \n
{








 

    \n
  1. Let's test the route:
  2. \n

    \n
  1. Now, let's add another PATCH route for /todo/<id>/item, this will help in editing a todo item's text and also marking a todo item as done or not done:
  2. \n
{













 


\nYou can find the complete code from the todo.js file at Code/Lesson-3/exercise-a/routes.

    \n
  1. This route can take each of the payload items one at a time (which will be the most practical case, when using, for example, a web or mobile UI), or all at once:\n
      \n
    1. For instance, changing the item from Nairobi to Nigeria, or:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. Marking the item as done:
    2. \n
    \n
  2. \n

    \n
  1. When we list the items again through the GET: /todo/<id>/item route, you will see the updated item:
  2. \n

"], "sub_items": []}, {"title": "Exercise 17: Deleting a Record", "contents": ["  \n 

In this exercise, we will be introducing the last vital Knex method to complete our Create, Read, Update, Delete (CRUD) journey, .delete():

    \n
  1. Let's add a route for deleting a todo item:
  2. \n
{











 

    \n
  1. Now, let's add one more item on our previous todo (of ID 1), then delete it:\n
      \n
    1. Add item:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. Now that we have its ID (2, in this case), delete it:
    2. \n
    \n
  2. \n

"], "sub_items": []}, {"title": "Exercise 18: Cleaning up the Code", "contents": ["  \n 

Now that we have almost updated all our routes that we had from Chapter 2, Building the API – Part 1, let's now remove all the code that is no longer needed:

    \n
  1. Remove the previously hardcoded list of todos:
  2. \n
const todoList = [


 

    \n
  1. Remove the PUT: /todo/<id> route object:
  2. \n
{










 

    \n
  1. Reimplement the DELETE: /todo/<id> route object, very similar to Exercise 17: Deleting a Record; the difference is just the route:
  2. \n
{











 

Since our SQL query had this line that adds a constraint, when a todo is deleted, all the items for that todo are also deleted:
\nCREATE TABLE todo_item(
\n  'id' INT PRIMARY KEY AUTO_INCREMENT,
\n  'text' VARCHAR(50),
\n  'done' BOOLEAN,
\n  'date_created' TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
\n  'todo_id' INT,
\n  FOREIGN KEY (`todo_id`) REFERENCES `todo` (`id`) ON DELETE CASCADE
\n);

"], "sub_items": []}]}, {"title": "Authenticating Your API with JWT", "contents": ["  \n 

So far, we have been using our API without any authentication. This means that if this API is hosted at a public place, anyone can access any of the routes, including deleting all our records! Any proper API needs authentication (and authorization). Basically, we need to know who is doing what, and if they are authorized (allowed) to do that.

JSON Web Tokens (JWT) is an open, industry standard method for representing claims securely between two parties. Claims are any bits of data that you want someone else to be able to read and/or verify but not alter.

To identify/authenticate users for our API, the user puts a standard-based token in the header (with the Authorization key) of the request (prefixing it with the word Bearer). We will see this practically in a short while.

"], "sub_items": [{"title": "Exercise 19: Securing All the Routes", "contents": ["  \n 

In this exercise, we're going to secure all the /todo/* routes that we created so that no unauthenticated user can access them. In the Exercise 21: Implementing Authorization, we will differentiate between an unauthenticated and an unauthorized user:

    \n
  1. We will first start by installing a Hapi.js plugin for JWT, hapi-auth-jwt. Go to the Terminal and run:
  2. \n
npm install hapi-auth-jwt --save

 


\nUse the Code/Lesson-3/exercise-b for your reference.

    \n
  1. We will modify the routes array that we get from ./routes/todo.js in the server.js file:\n
      \n
    1. First, begin by requiring the installed hapi-auth-jwt at the top of the file:
    2. \n
    \n
  2. \n
const hapiAuthJwt = require('hapi-auth-jwt');

 

    \n
  1. \n
      \n
    1. Then, replace the old line, server.route(routes.todo), with this:
    2. \n
    \n
  2. \n
server.register(hapiAuthJwt, (err) => 











 


\nYou can find the complete code from the server.js file at Code/Lesson-3/exercise-b.

    \n
  1. Now, try accessing any of the routes, for example, GET: /todo; you should get this:
  2. \n

"], "sub_items": []}, {"title": "Exercise 20: Adding User Authentication", "contents": ["  \n 

Now that we have secured all our todo routes, we need a way to issue tokens to valid users to access the API. We will have the users send their email and password to a route (/auth), and our API will issue back an authentication token which will be used for each request:

    \n
  1. In the /routes folder, create a file auth.js.
  2. \n
  3. We will now need two more packages for this, jsonwebtoken for signing the authentication token, and md5 for comparing the password since if you recall, we were using MySQL's md5 function to store the user's password:
  4. \n
npm install jsonwebtoken md5 --save

 

    \n
  1. In the auth.js file, have the following code:
  2. \n
const jwt = require('jsonwebtoken');









 


\nYou can find the complete code from the auth.js file at Code/Lesson-3/exercise-b/routes.

    \n
  1. Now, let's register our auth.js route with the server. In server.js, after routes.todo = ..., add the following code:
  2. \n
routes.auth = require('./routes/auth');

 

    \n
  1. After the line initializing the server, we can add the route registration:
  2. \n
server.route(routes.auth);

 

    \n
  1. Now, let's try out our route, POST: /auth:\n
      \n
    1. First, with the incorrect email/password combination:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. Then, with the correct password, remember Exercise 14: Creating a Record, step 2 where we created the test user with the password:
    2. \n
    \n
  2. \n

    \n
  1. Now, we can copy the generated token and use it for our subsequent requests, for example, GET: /todo, by adding an Authorization header. Remember, we start with the word Bearer, then a space, then paste the token; that's the JWT convention:
  2. \n

    \n
  1. And we can now access the route without getting the unauthorized responses, as in step 6 of the 20th exercise:
  2. \n

    \n
  1. Now, let's go back to the places in our ./routes/todo.js file where we were hardcoding the users, and now get them from the authentication object, that is:
  2. \n
const userId = request.auth.credentials.id;

 

Recall in the preceding step 3, when we were signing our token, we provided the user details, that is, name, email, and id. This is where we get the .id in request.auth.credentials.id:
\njwt.sign(
\n{
\n  name: user.name,
\n  email: user.email,
\n  id: user.id,
\n},
\n...
\n);

    \n
  1. Now, let's go back to our phpMyAdmin web interface and create another user, just like we did in Exercise 14: Creating a Record, step 2, and paste the following SQL in the SQL text area:
  2. \n
INSERT INTO 'user' ('id', 'name', 'email', 'password')


 

    \n
  1. Now, let's go and do another POST: /auth request with the new user and obtain the token:
  2. \n

    \n
  1. Let's use this new token to create another todo list by doing a POST: /todo request:\n
      \n
    1. On Insomnia, go to the Header section, delete the previous Authorization header and replace it with the new one:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. Now, let's make our request:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. Let's see the new list of todos, by doing GET: /todo:
    2. \n
    \n
  2. \n

    \n
  1. \n
      \n
    1. As you can see, the newly created user can only see what they have created. We have done a good job so far, as far as authorization is concerned. However, let's try and check the items for todo ID 1, which belonged to the first user:
    2. \n
    \n
  2. \n

Oops! We can see someone else's todo list items; this is a security flaw. This leads us to the final part of this topic, authorization.

"], "sub_items": []}, {"title": "Authentication versus Authorization", "contents": ["  \n 

Through authentication, we get to know who is accessing our API; through authorization, we get to tell who can access what, within our API.

"], "sub_items": []}, {"title": "Exercise 21: Implementing Authorization", "contents": ["  \n 

In this exercise, we are going to refine our API to make sure that users are only authorized to access their todos and todo items:

    \n
  1. Let's first fix the flaw that we came across in Exercise 20: Adding User Authentication, step 12. So, we will modify the GET: /todo/<id> item route object in /routes/todo.js, by first checking if the todo belongs to the user before they can access its items:
  2. \n
{









 


\nYou can find the complete code from the todo.js file at Code/Lesson-3/exercise-b/routes.

    \n
  1. Now, when we go back to access GET: /todo/1/item, we get the right error message:
  2. \n

    \n
  1. You can add extra authorization logic for the following routes:
  2. \n

Cross-Origin Resource Sharing (CORS) is a mechanism that uses additional HTTP headers to let a user agent (browser) gain permission to access selected resources from a server on a different origin (domain) than the site currently in use. For instance, when you are hosting a web application frontend on another domain, because of browser restriction, you will not be able to access the API.
\nWe therefore need to explicitly state that our API will allow cross-origin requests. We will modify the server.js file, at the place we were initializing the server connection, to enable CORS:

server.connection(








 

"], "sub_items": []}]}, {"title": "Testing Your API with Lab", "contents": ["  \n 

In this section, we will have a brief look at writing unit tests for Hapi.js APIs. Testing is a huge topic that perhaps requires a whole course on its own, but in this section, we will be introducing the essential parts to get you up and running.

Let's first underscore the importance of writing unit tests for your API:

Hapi.js conventionally uses Lab (https://github.com/hapijs/lab) as its testing framework. We're going to write a few tests for our API in the next exercise.

"], "sub_items": [{"title": "Exercise 22: Writing Basic Tests with Lab", "contents": ["  \n 

In this exercise, we will introduce the concept of writing unit tests for the Hapi.js web API, mainly using the third-party lab module and the built-in assert module. Ideally, we should have a separate database for our tests, but for the sake of simplicity here, we will share our development database for tests too:

    \n
  1. Let's first start by installing the necessary packages. Notice that we are using --save-dev since tests are not needed for production, therefore, they are development dependencies:
  2. \n
npm install lab --save-dev

 


\nUse the Code/Lesson-3/exercise-c for your reference.

    \n
  1. Create a test folder at the root of the project—that is where we will have our tests. Since our API is a simple one, we will only have one file with all our tests.
  2. \n
  3. In test, create a file test-todo.js.
  4. \n
  5. As a set up, test/test-todo.js requires the necessary modules that we need for our test:
  6. \n
const assert = require('assert');





 

In the first line, we are requiring assert, which is an inbuilt module if you recall from Chapter 1, Introduction to Node.js. Alternatively, you can use any other assertion libraries such as chai (https://github.com/chaijs/chai), should.js (https://github.com/tj/should.js), and others.
\n
\nLab test files must require the lab module and export a test script, as seen on line 4 prior. We will be getting the rest of the items from lab in the following line; we're going to see them in action shortly.

    \n
  1. Since we are requiring the server in line 6 of our test-todo.js file, we need to go back to our server.js file and export the server object, on the last line:
  2. \n
module.exports = server;

 

    \n
  1. For the DB configuration, let's modify our db.js file to include configurations for the test environment, pointing to the development configurations. Add this line right after the configs definition:
  2. \n
configs.test = configs.development;

 

    \n
  1. Let's modify the server connection setup code so that the port for our testing server is set from the environment variables when running the tests. This allows us to have the test server run on a different port, while our development server is running:
  2. \n
server.connection(








 

    \n
  1. There are a number of methods that we will use from the lab module; we will need to use object destructuring to get them. Add the following line in our test-todo.js file:
  2. \n
const





 

    \n
  1. Let's start by writing a simple test that makes sure that the GET: / request goes through, and returns { message: 'hello, world' } as defined:
  2. \n
experiment('Base API', () => 










 


\nYou can find the complete code from the test-todo.js file at Code/Lesson-3/exercise-c/test.
\nWe now see experiment, test, and assert.equal methods in action. experiment is basically a way of grouping together the tests, and the actual tests are written within the test method's callback function (known as test cases). assert.equal here is just comparing the two values to make sure they are equal, and if not, an assertion error will be thrown.

    \n
  1. Now, let's run our tests:
    \n
      \n
    1. On the Terminal (open a new Terminal if you're running the API on one of them), navigate to the root of our project and run the following command:
    2. \n
    \n
  2. \n
PORT=8001 ./node_modules/lab/bin/lab test --leaks

 

We're adding an optional --leaks option to turn off memory leak detection since we don't need it now.
\nAt the beginning of the command, we're adding PORT=8001; this is a way of passing an environment variable to our script, which is why we changed our code at step 7 previously. We're now running our test server on port 8001 while our development server is still running on port 8000.

    \n
  1. \n
      \n
    1. When you run the command, you should see something close to this:
    2. \n
    \n
  2. \n

    \n
  1. We can make our test command shorter by adding it as a script on our package.json file:\n
      \n
    1. Replace the following line of code:
    2. \n
    \n
  2. \n
\"test\": \"echo \\\"Error: no test specified\\\" && exit 1\"

 

    \n
  1. \n
      \n
    1. With the following line:
    2. \n
    \n
  2. \n
\"test\": \"PORT=8001 ./node_modules/lab/bin/lab test --leaks\"

 

    \n
  1. \n
      \n
    1. Now, go back to the Terminal and just run:
    2. \n
    \n
  2. \n
npm test

 

    \n
  1. Now, let's test that our authentication is working correctly. Add the following
    \nsegment after the previous one:
  2. \n
experiment('Authentication', () =>














 

    \n
  1. Now, go back and run npm test. Both tests should be passing:
  2. \n

    \n
  1. You realize that we're having to go back to the Terminal every other time to run the tests. This is just as cumbersome as testing on the API client (Insomnia); we need some automation:\n
      \n
    1. We will need gulp.js for this, and two other Gulp plugins. Let's install them:
    2. \n
    \n
  2. \n
npm install gulp gulp-shell gulp-watch --save-dev

 

    \n
  1. \n
      \n
    1. Now, let's write a simple gulpfile.js at the root of our project to automate our testing task:
    2. \n
    \n
  2. \n
const gulp = require('gulp');




 


\nYou can find the complete code from the gulpfile.js file at Code/Lesson-3/exercise-c.

    \n
  1. \n
      \n
    1. Now, let's go to package.json and add another script option for our gulp task, next to the previous test:
    2. \n
    \n
  2. \n
\"scripts\": 




 

    \n
  1. \n
      \n
    1. Now, go to the Terminal and instead of npm test, run the following:
    2. \n
    \n
  2. \n
npm run test:dev

 

    \n
  1. \n
      \n
    1. The watch task will be fired up, and therefore, whenever changes are made in any of the files within the src array in the preceding point, the test will automatically run. This means you can go on with your development work and periodically check that the tests are all good:
    2. \n
    \n
  2. \n

    \n
  1. Let's now write a sample test for GET: /todo route. Remember that for all the authenticated routes, we need the token first, for us to make a successful request. We will therefore need a script to get us the token before any tests begin. This is where the before function that we got in step 8 kicks in. In our test-todo.js file, add the following segment:
  2. \n
experiment('/todo/* routes', () => 













 


\nYou can find the complete code from the test-todo.js file at Code/Lesson-3/exercise-c/test.

"], "sub_items": []}]}, {"title": "Summary", "contents": ["  \n 

In this chapter, we have explored quite a lot. We started off with introducing Knex.js and how we can use it to connect and use the database. We went through the essential CRUD database methods. We then covered how we can authenticate our API and prevent it from unauthorized access, using the JWT mechanism. We also mentioned something important about CORS, how the browsers handle this and how we can enable this on our API. We finally finished off with covering concepts about testing our API, using the Lab library. We also covered, in passing, the concept of test automation using gulp.js.

In this book, we started off with learning how to implement the necessary modules to get simple applications up and running. We then moved on to implementing the async and await functions to handle asynchronous code efficiently. After a primer on Node.js (the application building aspect), we graduated to building an API using Node.js. To do this, we initially used the built-in module and then utilized the rich Hapi.js framework. We also understood the advantages of the Hapi.js framework. Later on, we learned how to handle requests from API clients and finally, we completed the book by covering interactions with databases.

This is a practical quick-start guide. To further your knowledge, you should consider building real-time applications with Node.js. We have recommended a few books in the next section, but ensure you check our website to find other books that may interest you!

"], "sub_items": []}]}, {"title": "Other Books You May Enjoy", "contents": ["  \n 

If you enjoyed this book, you may be interested in these other books by Packt:

RESTful Web API Design with Node.js 10 - Third Edition
\nValentin Bojinov

ISBN: 978-1-78862-332-2

Advanced Node.js Development
\nAndrew Mead

ISBN: 978-1-78839-393-5 

"], "sub_items": [{"title": "Leave a review - let other readers know what you think", "contents": ["  \n 

Please share your thoughts on this book with others by leaving a review on the site that you bought it from. If you purchased the book from Amazon, please leave us an honest review on this book's Amazon page. This is vital so that other potential readers can see and use your unbiased opinion to make purchasing decisions, we can understand what our customers think about our products, and our authors can see your feedback on the title that they have worked with Packt to create. It will only take a few minutes of your time, but is valuable to other potential customers, our authors, and Packt. Thank you!

"], "sub_items": []}]}, {"title": "To check #0, filename: text/part0010.html", "contents": [" \n

Table of Contents

"], "sub_items": []}, {"title": "To check #1, filename: titlepage.xhtml", "contents": ["

\n\n\n\n

"], "sub_items": []}], "footnotes": []} \ No newline at end of file