Backup AWS DynamoDB Table to Local

7/30/2020·3 min read

This blog shows how to back up your AWS DynamoDB table (in the cloud) to your local DynamoDB environment. This can be helpful when you want to build a prototype using production data.

Export table to S3

In the first step, log in to the AWS console's DynamoDB main page, select the table, and then choose to export it to an S3 bucket. At the time of writing, you need to enable Point-in-Time Recovery before you can export to S3. The console warns that this may incur charges, but once the data has been moved to S3 you can turn it back off.

Download data zip file

Once DynamoDB has exported the table to S3, you should be able to find your data under {your bucket}/AWSDynamoDB/{unique guid}/data; download the zip file to your machine.

The following is the JSON file I downloaded from the S3 bucket. Notice that there is no comma after each item object — the export is newline-delimited JSON. To make it parseable as a single JSON document, simply add a comma after each item object (and wrap the items in an array under an "Items" key).

{"Item":{"age":{"S":"16"},"name":{"S":"Clifton Trantow"},"phone":{"S":"325-550-4097"}}}
{"Item":{"age":{"S":"3"},"name":{"S":"Dallas Zulauf"},"phone":{"S":"(201) 844-4314 x024"}}}
{"Item":{"age":{"S":"81"},"name":{"S":"Jim Corkery"},"phone":{"S":"1-615-855-7445"}}}
{"Item":{"age":{"S":"112"},"name":{"S":"Mrs. Lisa Morissette"},"phone":{"S":"451-951-5004 x91302"}}}
{"Item":{"age":{"S":"5"},"name":{"S":"Tami Lind"},"phone":{"S":"(961) 774-0217 x529"}}}
{"Item":{"age":{"S":"38"},"name":{"S":"Michele Corwin"},"phone":{"S":"(516) 496-8378 x38139"}}}
{"Item":{"age":{"S":"64"},"name":{"S":"Cory Renner"},"phone":{"S":"1-866-451-5277 x11173"}}}
{"Item":{"age":{"S":"21"},"name":{"S":"Mr. Hannah Toy"},"phone":{"S":"1-909-356-2256 x160"}}}
{"Item":{"age":{"S":"39"},"name":{"S":"Horace Luettgen"},"phone":{"S":"923.425.3214 x763"}}}
{"Item":{"age":{"S":"81"},"name":{"S":"Jake Johnson"},"phone":{"S":"838-703-7094 x17403"}}}
{"Item":{"age":{"S":"111"},"name":{"S":"Violet McClure"},"phone":{"S":"(476) 838-6678"}}}
{"Item":{"age":{"S":"88"},"name":{"S":"Faith Hoppe"},"phone":{"S":"1-632-638-0225 x871"}}}
{"Item":{"age":{"S":"102"},"name":{"S":"Floyd Kshlerin"},"phone":{"S":"1-465-918-6299 x2042"}}}
{"Item":{"age":{"S":"104"},"name":{"S":"Alvin Moore"},"phone":{"S":"1-885-900-8942 x467"}}}
{"Item":{"age":{"S":"37"},"name":{"S":"Mildred Kuhn"},"phone":{"S":"(681) 776-7208 x982"}}}
{"Item":{"age":{"S":"107"},"name":{"S":"Michele Lubowitz"},"phone":{"S":"(933) 324-2667"}}}
{"Item":{"age":{"S":"51"},"name":{"S":"Ian Weber"},"phone":{"S":"504.392.7656 x729"}}}
{"Item":{"age":{"S":"54"},"name":{"S":"Ms. Randall O'Keefe"},"phone":{"S":"1-860-892-1635"}}}
{"Item":{"age":{"S":"99"},"name":{"S":"Ricky Kutch PhD"},"phone":{"S":"767-631-7777"}}}
{"Item":{"age":{"S":"10"},"name":{"S":"Lynn Collier"},"phone":{"S":"1-868-378-3294 x945"}}}

Use DynamoDB putItem or BatchWriteItem to import into the local DynamoDB table

In the following code, I load the JSON file downloaded from the AWS DynamoDB table and add each item to the MyFakeFriend table in my local DynamoDB instance running on port 8000.

The following example is using node.js and running on Mac OS.

const fs = require('fs');
const AWS = require("aws-sdk");

/**
 * Loads items from ./MyFakeFriendData.json and writes each one to the
 * MyFakeFriend table of a local DynamoDB instance on port 8000.
 *
 * Expects the file to contain { "Items": [...] }, i.e. the raw
 * newline-delimited S3 export fixed up with commas and an Items array.
 */
function addDataToLocal() {
    // Point the SDK at the local DynamoDB endpoint instead of the AWS cloud.
    AWS.config.update({
        region: "us-west-2",
        endpoint: "http://localhost:8000"
    });

    const dynamodb = new AWS.DynamoDB();

    fs.readFile('./MyFakeFriendData.json', (err, data) => {
        if (err) throw err;
        const jsonData = JSON.parse(data);

        // Items are already in DynamoDB attribute-value format
        // (e.g. {"age":{"S":"16"}}), so they can be passed straight to putItem.
        jsonData.Items.forEach((item) => {
            const params = {
                TableName: 'MyFakeFriend',
                Item: item,
            };

            dynamodb.putItem(params, (err, result) => {
                if (err) {
                    console.error("Unable to add data", JSON.stringify(err, null, 2));
                } else {
                    console.log("Able to add data", result);
                }
            });
        });
    });
}
addDataToLocal();

That's how you can move a table from the AWS cloud to a local DynamoDB instance.