Skip to content

Commit 256a7db

Browse files
[Bug Fix] operations/*-access-logs-anonymizer - Improve error handling when the file was already anonymized (widdix#459)
1 parent 9713a27 commit 256a7db

File tree

2 files changed

+116
-30
lines changed

2 files changed

+116
-30
lines changed

operations/alb-access-logs-anonymizer.yaml

Lines changed: 58 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,14 @@ Resources:
5757
- PolicyName: s3
5858
PolicyDocument:
5959
Statement:
60+
- Effect: Allow
61+
Action:
62+
- 's3:ListBucket'
63+
- 's3:ListBucketVersions'
64+
Resource: !Sub
65+
- 'arn:${Partition}:s3:::${BucketName}'
66+
- Partition: !Ref 'AWS::Partition'
67+
BucketName: {'Fn::ImportValue': !Sub '${ParentS3Stack}-BucketName'}
6068
- Effect: Allow
6169
Action:
6270
- 's3:GetObject'
@@ -127,6 +135,7 @@ Resources:
127135
}
128136
129137
async function process(record) {
138+
const anonymizedKey = record.s3.object.key.slice(0, -2) + 'anonymized.gz';
130139
let chunk = Buffer.alloc(0);
131140
const transform = (currentChunk, encoding, callback) => {
132141
chunk = Buffer.concat([chunk, currentChunk]);
@@ -153,21 +162,55 @@ Resources:
153162
if ('versionId' in record.s3.object) {
154163
params.VersionId = record.s3.object.versionId;
155164
}
156-
const body = s3.getObject(params).createReadStream()
157-
.pipe(zlib.createGunzip())
158-
.pipe(new stream.Transform({
159-
transform
160-
}))
161-
.pipe(zlib.createGzip());
162-
await s3.upload({
163-
Bucket: record.s3.bucket.name,
164-
Key: record.s3.object.key.slice(0, -2) + 'anonymized.gz',
165-
Body: body
166-
}).promise();
167-
if (chunk.length > 0) {
168-
throw new Error('file was not read completly');
169-
}
170-
return s3.deleteObject(params).promise();
165+
return new Promise((resolve, reject) => {
166+
const body = stream.pipeline(
167+
s3.getObject(params).createReadStream(),
168+
zlib.createGunzip(),
169+
new stream.Transform({
170+
transform
171+
}),
172+
zlib.createGzip(),
173+
() => {}
174+
);
175+
s3.upload({
176+
Bucket: record.s3.bucket.name,
177+
Key: anonymizedKey,
178+
Body: body
179+
}, (err) => {
180+
if (err) {
181+
if (err) {
182+
if (err.code === 'NoSuchKey') {
183+
console.log('original no longer exist, check for anonymized object.')
184+
s3.headObject({
185+
Bucket: record.s3.bucket.name,
186+
Key: anonymizedKey
187+
}, (err) => {
188+
if (err) {
189+
reject(err);
190+
} else {
191+
// original already processed
192+
resolve();
193+
}
194+
});
195+
} else {
196+
reject(err);
197+
}
198+
}
199+
} else {
200+
if (chunk.length > 0) {
201+
reject(new Error('file was not read completly'));
202+
} else {
203+
s3.deleteObject(params, (err) => {
204+
if (err) {
205+
reject(err);
206+
} else {
207+
resolve();
208+
}
209+
});
210+
}
211+
}
212+
});
213+
});
171214
}
172215
173216
exports.handler = async (event) => {

operations/cloudfront-access-logs-anonymizer.yaml

Lines changed: 58 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,14 @@ Resources:
5757
- PolicyName: s3
5858
PolicyDocument:
5959
Statement:
60+
- Effect: Allow
61+
Action:
62+
- 's3:ListBucket'
63+
- 's3:ListBucketVersions'
64+
Resource: !Sub
65+
- 'arn:${Partition}:s3:::${BucketName}'
66+
- Partition: !Ref 'AWS::Partition'
67+
BucketName: {'Fn::ImportValue': !Sub '${ParentS3Stack}-BucketName'}
6068
- Effect: Allow
6169
Action:
6270
- 's3:GetObject'
@@ -128,6 +136,7 @@ Resources:
128136
}
129137
130138
async function process(record) {
139+
const anonymizedKey = record.s3.object.key.slice(0, -2) + 'anonymized.gz';
131140
let chunk = Buffer.alloc(0);
132141
const transform = (currentChunk, encoding, callback) => {
133142
chunk = Buffer.concat([chunk, currentChunk]);
@@ -154,21 +163,55 @@ Resources:
154163
if ('versionId' in record.s3.object) {
155164
params.VersionId = record.s3.object.versionId;
156165
}
157-
const body = s3.getObject(params).createReadStream()
158-
.pipe(zlib.createGunzip())
159-
.pipe(new stream.Transform({
160-
transform
161-
}))
162-
.pipe(zlib.createGzip());
163-
await s3.upload({
164-
Bucket: record.s3.bucket.name,
165-
Key: record.s3.object.key.slice(0, -2) + 'anonymized.gz',
166-
Body: body
167-
}).promise();
168-
if (chunk.length > 0) {
169-
throw new Error('file was not read completly');
170-
}
171-
return s3.deleteObject(params).promise();
166+
return new Promise((resolve, reject) => {
167+
const body = stream.pipeline(
168+
s3.getObject(params).createReadStream(),
169+
zlib.createGunzip(),
170+
new stream.Transform({
171+
transform
172+
}),
173+
zlib.createGzip(),
174+
() => {}
175+
);
176+
s3.upload({
177+
Bucket: record.s3.bucket.name,
178+
Key: anonymizedKey,
179+
Body: body
180+
}, (err) => {
181+
if (err) {
182+
if (err) {
183+
if (err.code === 'NoSuchKey') {
184+
console.log('original no longer exist, check for anonymized object.')
185+
s3.headObject({
186+
Bucket: record.s3.bucket.name,
187+
Key: anonymizedKey
188+
}, (err) => {
189+
if (err) {
190+
reject(err);
191+
} else {
192+
// original already processed
193+
resolve();
194+
}
195+
});
196+
} else {
197+
reject(err);
198+
}
199+
}
200+
} else {
201+
if (chunk.length > 0) {
202+
reject(new Error('file was not read completly'));
203+
} else {
204+
s3.deleteObject(params, (err) => {
205+
if (err) {
206+
reject(err);
207+
} else {
208+
resolve();
209+
}
210+
});
211+
}
212+
}
213+
});
214+
});
172215
}
173216
174217
exports.handler = async (event) => {

0 commit comments

Comments
 (0)