Skip to content

Commit 2d5fd23

Browse files
committed
Update
1 parent 13eefb8 commit 2d5fd23

4 files changed

Lines changed: 166 additions & 52 deletions

File tree

backend/import-service/cdk_test/src/CdkTest/CdkTestStack.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
using Amazon.CDK;
44
using Amazon.CDK.AWS.APIGateway;
55
using Amazon.CDK.AWS.Lambda;
6-
using Amazon.CDK.AWS.Lambda.EventSources;
76
using Amazon.CDK.AWS.S3;
87
using Amazon.CDK.AWS.S3.Notifications;
98
using Constructs;
Lines changed: 51 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,61 +1,66 @@
1-
const { S3Client, GetObjectCommand, CopyObjectCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3");
2-
const csv = require("csv-parser");
3-
const stream = require("stream");
4-
const { HTTP_STATUS, MESSAGES } = require('./constants');
1+
const { S3Client, GetObjectCommand, CopyObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3');
2+
const csvParser = require("csv-parser");
3+
const { getCorsHeaders } = require("./cors");
4+
const { HTTP_STATUS, MESSAGES } = require("./constants");
55

6-
const client = new S3Client();
76

87
exports.handler = async (event) => {
9-
console.log("Import request", event);
8+
const s3 = new S3Client();
109

11-
const bucketName = event.Records[0].s3.bucket.name;
12-
const fileName = event.Records[0].s3.object.key;
13-
console.log("bucketName", bucketName);
14-
console.log("fileName", fileName);
10+
const origin = event.headers?.origin;
11+
const headers = getCorsHeaders(origin);
1512

16-
try {
17-
const getCommand = new GetObjectCommand({
18-
Bucket: bucketName,
19-
Key: fileName,
20-
});
13+
const bucketName = event['Records'][0]['s3']['bucket']['name'];
14+
const objectName = event['Records'][0]['s3']['object']['key'];
2115

22-
const parsedKey = fileName.replace("uploaded/", "parsed/");
16+
console.log('bucket:', bucketName);
17+
console.log('key:', objectName);
2318

24-
const copyCommand = new CopyObjectCommand({
25-
Bucket: bucketName,
26-
CopySource: `${bucketName}/${fileName}`,
27-
Key: parsedKey,
19+
try {
20+
const getObjectCommand = new GetObjectCommand({
21+
Bucket: bucketName,
22+
Key: objectName,
2823
});
2924

30-
const deleteCommand = new DeleteObjectCommand({
31-
Bucket: bucketName,
32-
Key: fileName,
33-
});
25+
const s3Object = await s3.send(getObjectCommand);
26+
27+
const results = [];
28+
const parser = s3Object.Body.pipe(csvParser());
29+
30+
for await (const record of parser) {
31+
results.push(record);
32+
}
33+
34+
console.log('Parsed CSV data:', results)
35+
36+
const newObjectKey = `parsed/${objectName.split('/').pop()}`;
37+
await s3.send(new CopyObjectCommand({
38+
Bucket: bucketName,
39+
CopySource: `${bucketName}/${objectName}`,
40+
Key: newObjectKey
41+
}));
3442

35-
const res = await client.send(getCommand);
36-
console.log(res);
37-
const parsed = [];
43+
console.log('Copied file:', newObjectKey);
3844

39-
if (res.Body instanceof stream.Readable) {
40-
res.Body.pipe(csv())
41-
.on("data", (data) => parsed.push(data))
42-
.on("end", async () => {
43-
console.log("Parsed CSV:", parsed);
45+
await s3.send(new DeleteObjectCommand({
46+
Bucket: bucketName,
47+
Key: objectName
48+
}));
4449

45-
// Copy to "parsed/"
46-
await client.send(copyCommand);
50+
console.log('Deleted file:', objectName);
4751

48-
// Delete from "uploaded"
49-
await client.send(deleteCommand);
52+
return {
53+
statusCode: HTTP_STATUS.OK,
54+
headers,
55+
body: JSON.stringify({ message: 'CSV File was processed' }),
56+
}
57+
} catch (error) {
58+
console.log('Error processing CSV file:', error);
5059

51-
console.log(
52-
'Successfully parsed the CSV and moved the file from "uploaded" to "parsed" folder'
53-
);
54-
});
55-
} else {
56-
throw new Error("Not a readable stream");
60+
return {
61+
statusCode: HTTP_STATUS.INTERNAL_SERVER_ERROR,
62+
headers,
63+
body: JSON.stringify({ message: MESSAGES.INTERNAL_SERVER_ERROR }),
64+
};
5765
}
58-
} catch (error) {
59-
console.error("Error", error);
60-
}
61-
};
66+
};

backend/import-service/package-lock.json

Lines changed: 1 addition & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Lines changed: 114 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,116 @@
const { S3Client, GetObjectCommand, CopyObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3');
const csvParser = require('csv-parser');
const { getCorsHeaders } = require('../lambdas/cors');
const { HTTP_STATUS, MESSAGES } = require('../lambdas/constants');
const { handler } = require('../lambdas/importFileParser');

jest.mock('@aws-sdk/client-s3');
jest.mock('csv-parser');

describe('Lambda handler', () => {
  let sendMock;

  // Single source of truth for the S3 notification event fixture.
  const buildEvent = () => ({
    headers: {
      origin: 'http://example.com'
    },
    Records: [{
      s3: {
        bucket: {
          name: 'my-bucket'
        },
        object: {
          key: 'path/to/myfile.csv'
        }
      }
    }]
  });

  beforeEach(() => {
    sendMock = jest.fn();
    S3Client.mockImplementation(() => ({
      send: sendMock
    }));
  });

  afterEach(() => {
    jest.resetAllMocks();
  });

  test('should process CSV and return success response', async () => {
    const mockEvent = buildEvent();

    // Mock the S3 getObject response to return a readable stream.
    const { PassThrough } = require('stream');
    const mockStream = new PassThrough();
    mockStream.end('name,age\nAlice,30\nBob,25\n');

    sendMock
      .mockResolvedValueOnce({ Body: mockStream }) // GetObjectCommand
      .mockResolvedValueOnce({})                   // CopyObjectCommand
      .mockResolvedValueOnce({});                  // DeleteObjectCommand

    // Mock csv-parser with an object-mode stream that emits parsed rows.
    const mockCsvStream = new PassThrough({ objectMode: true });
    setImmediate(() => {
      mockCsvStream.write({ name: 'Alice', age: '30' });
      mockCsvStream.write({ name: 'Bob', age: '25' });
      mockCsvStream.end();
    });

    csvParser.mockReturnValue(mockCsvStream);

    const response = await handler(mockEvent);

    expect(sendMock).toHaveBeenCalledTimes(3);
    expect(sendMock).toHaveBeenNthCalledWith(1, expect.any(GetObjectCommand));
    expect(sendMock).toHaveBeenNthCalledWith(2, expect.any(CopyObjectCommand));
    expect(sendMock).toHaveBeenNthCalledWith(3, expect.any(DeleteObjectCommand));

    expect(response).toEqual({
      // Use the shared constant instead of a hard-coded 200, matching the
      // error-path test's use of HTTP_STATUS.
      statusCode: HTTP_STATUS.OK,
      // Derive expected headers from the real (unmocked) CORS helper instead
      // of duplicating its output as literals, so the test stays in sync.
      headers: getCorsHeaders('http://example.com'),
      body: JSON.stringify({ message: 'CSV File was processed' })
    });
  });

  test('should handle errors and return error response', async () => {
    const mockEvent = buildEvent();

    sendMock.mockRejectedValue(new Error('Something went wrong'));

    const response = await handler(mockEvent);

    // The first send (GetObject) rejects, so no copy/delete should follow.
    expect(sendMock).toHaveBeenCalledTimes(1);
    expect(response).toEqual({
      statusCode: HTTP_STATUS.INTERNAL_SERVER_ERROR,
      headers: getCorsHeaders('http://example.com'),
      body: JSON.stringify({ message: MESSAGES.INTERNAL_SERVER_ERROR })
    });
  });
});

0 commit comments

Comments
 (0)