Backup AWS DynamoDB Table to Local

2020/12/31 · 3 min read
bookmark this
Responsive image

Table of Contents

  1. Introduction
  2. Export Table to S3
  3. Download Data Zip File
  4. Import to Local DynamoDB Table
  5. Conclusion

Introduction

This guide shows how to backup your AWS DynamoDB table (on the cloud) to your local DynamoDB environment. This can be helpful when you want to build a prototype using production data without affecting the live database.

Export Table to S3

First, log in to the AWS Console and navigate to the DynamoDB main page. Select the table you want to export, then choose Export to S3 bucket.

At the time of writing, you need to enable Point-in-Time Recovery before you can export to S3. Note that enabling this feature may incur additional charges. Once the export is complete, you can disable it if you no longer need it.

Download Data Zip File

Once DynamoDB exports the table to S3, you should be able to find your data under:

{your-bucket}/AWSDynamoDB/{unique-guid}/data

Download the zip file to your local machine.

The following is a sample of the JSON file downloaded from the S3 bucket. Notice that there is no comma after each item object. You can fix this by adding a comma , after each item object and wrapping the entire list in square brackets [ ] to make it valid JSON array syntax.

{"Item":{"age":{"S":"16"},"name":{"S":"Clifton Trantow"},"phone":{"S":"325-550-4097"}}}
{"Item":{"age":{"S":"3"},"name":{"S":"Dallas Zulauf"},"phone":{"S":"(201) 844-4314 x024"}}}
{"Item":{"age":{"S":"81"},"name":{"S":"Jim Corkery"},"phone":{"S":"1-615-855-7445"}}}
{"Item":{"age":{"S":"112"},"name":{"S":"Mrs. Lisa Morissette"},"phone":{"S":"451-951-5004 x91302"}}}
{"Item":{"age":{"S":"5"},"name":{"S":"Tami Lind"},"phone":{"S":"(961) 774-0217 x529"}}}
{"Item":{"age":{"S":"38"},"name":{"S":"Michele Corwin"},"phone":{"S":"(516) 496-8378 x38139"}}}
{"Item":{"age":{"S":"64"},"name":{"S":"Cory Renner"},"phone":{"S":"1-866-451-5277 x11173"}}}
{"Item":{"age":{"S":"21"},"name":{"S":"Mr. Hannah Toy"},"phone":{"S":"1-909-356-2256 x160"}}}
{"Item":{"age":{"S":"39"},"name":{"S":"Horace Luettgen"},"phone":{"S":"923.425.3214 x763"}}}
{"Item":{"age":{"S":"81"},"name":{"S":"Jake Johnson"},"phone":{"S":"838-703-7094 x17403"}}}
{"Item":{"age":{"S":"111"},"name":{"S":"Violet McClure"},"phone":{"S":"(476) 838-6678"}}}
{"Item":{"age":{"S":"88"},"name":{"S":"Faith Hoppe"},"phone":{"S":"1-632-638-0225 x871"}}}
{"Item":{"age":{"S":"102"},"name":{"S":"Floyd Kshlerin"},"phone":{"S":"1-465-918-6299 x2042"}}}
{"Item":{"age":{"S":"104"},"name":{"S":"Alvin Moore"},"phone":{"S":"1-885-900-8942 x467"}}}
{"Item":{"age":{"S":"37"},"name":{"S":"Mildred Kuhn"},"phone":{"S":"(681) 776-7208 x982"}}}
{"Item":{"age":{"S":"107"},"name":{"S":"Michele Lubowitz"},"phone":{"S":"(933) 324-2667"}}}
{"Item":{"age":{"S":"51"},"name":{"S":"Ian Weber"},"phone":{"S":"504.392.7656 x729"}}}
{"Item":{"age":{"S":"54"},"name":{"S":"Ms. Randall O'Keefe"},"phone":{"S":"1-860-892-1635"}}}
{"Item":{"age":{"S":"99"},"name":{"S":"Ricky Kutch PhD"},"phone":{"S":"767-631-7777"}}}
{"Item":{"age":{"S":"10"},"name":{"S":"Lynn Collier"},"phone":{"S":"1-868-378-3294 x945"}}}

Import to Local DynamoDB Table

In this step, we load the JSON file downloaded from S3 and insert the data into the local DynamoDB table running on port 8000. You can use DynamoDB's putItem or BatchWriteItem API to import the data.

The following example uses Node.js and runs on macOS.

const fs = require("fs");
const AWS = require("aws-sdk");
/**
 * Reads the DynamoDB export file from disk and inserts every item into the
 * local DynamoDB table "MyFakeFriend" via the putItem API.
 *
 * Assumes a local DynamoDB instance is listening on http://localhost:8000.
 * The input file may be either:
 *   - a JSON array of `{"Item": {...}}` entries (the S3 export lines joined
 *     with commas and wrapped in brackets, as described above), or
 *   - an object with an `Items` array of raw item maps.
 */
function addDataToLocal() {
  // Point the SDK at the local DynamoDB endpoint instead of the cloud.
  AWS.config.update({
    region: "us-west-2",
    endpoint: "http://localhost:8000",
  });

  const dynamodb = new AWS.DynamoDB();

  fs.readFile("./MyFakeFriendData.json", (err, data) => {
    if (err) throw err;

    const parsed = JSON.parse(data);

    // The S3 export produces `{"Item": {...}}` wrappers, so unwrap `.Item`
    // when the file is an array; fall back to an `Items` property otherwise.
    // (The original code read `jsonData.Items`, which is undefined on an
    // array and would crash with the comma-joined export format.)
    const items = Array.isArray(parsed)
      ? parsed.map((entry) => entry.Item ?? entry)
      : parsed.Items;

    items.forEach((item) => {
      const params = {
        TableName: "MyFakeFriend",
        Item: item,
      };

      // Fire-and-forget writes; each callback just logs success or failure.
      dynamodb.putItem(params, (putErr, result) => {
        if (putErr) {
          console.error("Unable to add data", JSON.stringify(putErr, null, 2));
        } else {
          console.log("Able to add data", result);
        }
      });
    });
  });
}

addDataToLocal();

Conclusion

This is how you can move a DynamoDB table from the AWS cloud to your local DynamoDB environment. The process involves three main steps:

  1. Export the table from DynamoDB to an S3 bucket
  2. Download the exported data zip file from S3
  3. Import the data into your local DynamoDB using the putItem API

This approach is useful for creating local prototypes with real production data, allowing you to develop and test without affecting your cloud resources.