diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 27a8ab2..65fc32e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -21,17 +21,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
- python-version: '3.9'
+ python-version: '3.10'
- name: Setup Nodejs
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
- node-version: 16
+ node-version: 20.x
- name: Install dependencies
run: |
@@ -86,7 +86,7 @@ jobs:
- name: Upload the Diagnostic Report
if: failure()
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: diagnose.json.gz
path: ./diagnose.json.gz
diff --git a/Makefile b/Makefile
index a983274..40a97bc 100644
--- a/Makefile
+++ b/Makefile
@@ -11,11 +11,12 @@ usage:
install:
@which localstack || pip install localstack
@which awslocal || pip install awscli-local
- @which serverless || npm install -g serverless
+ @which serverless || npm install -g serverless@3.38.0
npm install
## Deploy the Transcribe sample
deploy:
+
sls deploy --stage local --verbose
make configure
diff --git a/package.json b/package.json
index 7f788ff..65a3332 100644
--- a/package.json
+++ b/package.json
@@ -1,9 +1,13 @@
{
- "dependencies": {
- "aws-sdk": "^2.1386.0",
- "serverless": "^3.31.0",
- "serverless-lift": "^1.26.1",
- "serverless-localstack": "^1.1.1"
- }
+ "type":"module",
+ "dependencies": {
+ "aws-sdk": "^2.1386.0",
+ "serverless": "^3.38.0",
+ "serverless-lift": "^1.26.1",
+ "serverless-localstack": "^1.1.1",
+ "@aws-sdk/client-s3": "^3.0.0",
+ "@aws-sdk/client-sqs": "^3.0.0",
+ "@aws-sdk/client-ses": "^3.0.0",
+ "@aws-sdk/client-transcribe": "^3.0.0"
+ }
}
-
\ No newline at end of file
diff --git a/serverless.yml b/serverless.yml
index 690fb54..6cfd479 100644
--- a/serverless.yml
+++ b/serverless.yml
@@ -14,13 +14,13 @@ plugins:
custom:
defaultStage: local
profile: default
- runtime: nodejs14.x
+ runtime: nodejs20.x
localstack:
stages: [local]
provider:
name: aws
- runtime: nodejs14.x
+ runtime: nodejs20.x
environment:
S3_AUDIO_BUCKET: ${self:service}-${opt:stage, self:provider.stage}-records
S3_TRANSCRIPTION_BUCKET: ${self:service}-${opt:stage, self:provider.stage}-transcriptions
@@ -45,7 +45,7 @@ provider:
functions:
transcribe:
- handler: src/transcribe.process
+ handler: src/transcribe.transcribe_process
events:
- s3:
bucket: ${self:provider.environment.S3_AUDIO_BUCKET}
diff --git a/src/jobs.js b/src/jobs.mjs
similarity index 82%
rename from src/jobs.js
rename to src/jobs.mjs
index bea0173..2abac97 100644
--- a/src/jobs.js
+++ b/src/jobs.mjs
@@ -1,13 +1,20 @@
'use strict';
-const awsSdk = require('aws-sdk');
+import { S3, ListObjectsCommand } from "@aws-sdk/client-s3";
const endpoint = process.env.AWS_ENDPOINT_URL
-const s3 = new awsSdk.S3({endpoint: endpoint, s3ForcePathStyle: true});
+const s3 = new S3({
+ endpoint: endpoint,
+ forcePathStyle: true,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+},
+});
// This function is triggered by an HTTP request using the GET method.
// The function returns a list of all the transcription jobs stored in the S3 bucket.
-exports.list = async (event, context, callback) => {
+export const list = async (event, context, callback) => {
var htmlStr = `
@@ -33,8 +40,8 @@ exports.list = async (event, context, callback) => {
};
try {
- const { Contents } = await s3.listObjects(params).promise();
- const keys = Contents.map(({ Key }) => Key);
+ const data = await s3.send(new ListObjectsCommand(params));
+      const keys = (data.Contents ?? []).map(({ Key }) => Key);
keys.forEach(object => {
htmlStr += `
${object}`
});
diff --git a/src/presign.js b/src/presign.mjs
similarity index 65%
rename from src/presign.js
rename to src/presign.mjs
index 6410e45..4c3c85c 100644
--- a/src/presign.js
+++ b/src/presign.mjs
@@ -1,30 +1,30 @@
-const AWS = require('aws-sdk');
-const credentials = new AWS.Credentials({
- accessKeyId: 'test',
- secretAccessKey: 'test'
+import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
+import { PutObjectCommand, S3 } from '@aws-sdk/client-s3';
+
+const s3 = new S3({
+ endpoint: 'https://s3.localhost.localstack.cloud:4566',
+ forcePathStyle: true,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+ },
});
-AWS.config.credentials = credentials;
-
-const s3 = new AWS.S3(
- {
- endpoint: 'https://s3.localhost.localstack.cloud:4566',
- s3ForcePathStyle: true,
- }
-);
// This function is triggered by an HTTP request using the POST method.
// The function returns a presigned URL to upload a file to S3.
-exports.post = async (event) => {
+export const post = async (event) => {
const bucketName = 'aws-node-sample-transcribe-s3-local-records';
const key = event.queryStringParameters.filename;
const expiration = 3600;
try {
- const url = await s3.getSignedUrlPromise('putObject', {
+ const url = await getSignedUrl(s3, new PutObjectCommand({
Bucket: bucketName,
Key: key,
- Expires: expiration,
+ }), {
+ expiresIn: expiration,
});
+ console.log('Presigned URL: ', url);
return {
statusCode: 200,
body: {"url": url},
diff --git a/src/queue.js b/src/queue.js
deleted file mode 100644
index 63d0541..0000000
--- a/src/queue.js
+++ /dev/null
@@ -1,101 +0,0 @@
-const awsSdk = require('aws-sdk');
-
-const endpoint = process.env.AWS_ENDPOINT_URL;
-const sqs = new awsSdk.SQS({endpoint: endpoint});
-const ses = new awsSdk.SES({endpoint: endpoint});
-const s3 = new awsSdk.S3({endpoint: endpoint, s3ForcePathStyle: true});
-const queueUrl = `${endpoint}/000000000000/aws-node-sample-transcribe-s3-local-jobs`;
-const transcriptionBucket = process.env.S3_TRANSCRIPTION_BUCKET
-
-// This function consumes the event from s3 PutObject and pushes a new message to SQS.
-const producer = async (event, context, callback) => {
- let statusCode = 200;
- let message;
-
- try {
- // Get the record from the s3 event
- const records = event.Records;
- const sqsSendMessagePromises = records.map((record) => {
- var jsonContent = "";
- const params = {
- Bucket: transcriptionBucket,
- Key: record.s3.object.key
- };
- s3.getObject(params, (err, data) => {
- if (err) {
- console.error("Error getting object from S3 bucket: ", transcriptionBucket)
- } else {
- jsonContent = JSON.parse(data.Body.toString());
-
- // Send message to SQS queue
- return sqs.sendMessage({
- QueueUrl: queueUrl,
- MessageBody: jsonContent.results.transcripts[0].transcript,
- MessageAttributes: {
- AttributeName: {
- StringValue: "Attribute Value",
- DataType: "String",
- },
- },
- }).promise();
- }
- });
- });
-
- Promise.all(sqsSendMessagePromises)
- .then(() => {
- callback(null, { message: 'Message sent successfully' });
- })
- .catch(err => callback(err, { message: 'Error sending message' }));
- message = "Message accepted!";
- } catch (error) {
- console.log(error);
- message = error;
- statusCode = 500;
- }
- return {
- statusCode,
- body: JSON.stringify({
- message,
- }),
- };
-};
-
-// The function is triggered by the SQS queue.
-// A function is triggered whenever there is a new message in the queue.
-// When the function is triggered, it sends an email to 'sender@example.com'
-const consumer = async (event) => {
- for (const record of event.Records) {
- const params = {
- Destination: {
- ToAddresses: ["recipient@example.com"] // Email address/addresses that you want to send your email
- },
- Message: {
- Body: {
- Text: {
- Charset: "UTF-8",
- Data: `Hey there! Here is your generated transcribed file:\n${record.body}`
- }
- },
- Subject: {
- Charset: "UTF-8",
- Data: "Test Email - JOB COMPLETED SUCCESSFULLY"
- }
- },
- Source: "sender@example.com" // Sender email address
- };
-
- try {
- // Send email to recipient
- const result = await ses.sendEmail(params).promise();
- console.log("Email sent successfully: ", result);
- } catch (error) {
- console.error("Error sending email: ", error);
- }
- }
-};
-
-module.exports = {
- producer,
- consumer,
-};
diff --git a/src/queue.mjs b/src/queue.mjs
new file mode 100644
index 0000000..6f6ac7c
--- /dev/null
+++ b/src/queue.mjs
@@ -0,0 +1,122 @@
+import { S3, GetObjectCommand } from "@aws-sdk/client-s3";
+import { SES, SendEmailCommand } from "@aws-sdk/client-ses";
+import { SQS, SendMessageCommand } from "@aws-sdk/client-sqs";
+
+const endpoint = process.env.AWS_ENDPOINT_URL;
+const sqs = new SQS({
+ endpoint: endpoint,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+},
+});
+const ses = new SES({
+ endpoint: endpoint,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+},
+});
+const s3Client = new S3({
+ endpoint: endpoint,
+ forcePathStyle: true,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+},
+});
+const queueUrl = `${endpoint}/000000000000/aws-node-sample-transcribe-s3-local-jobs`;
+const transcriptionBucket = process.env.S3_TRANSCRIPTION_BUCKET
+
+// This function consumes the event from s3 PutObject and pushes a new message to SQS.
+const producer = async (event, context, callback) => {
+ let statusCode = 200;
+ let message;
+
+ try {
+ // Get the record from the s3 event
+ const records = event.Records;
+ const sqsSendMessagePromises = records.map(async (record) => {
+ const params = {
+ Bucket: transcriptionBucket,
+ Key: record.s3.object.key,
+ };
+
+ try {
+ const data = await s3Client.send(new GetObjectCommand(params));
+ const str = await data.Body.transformToString();
+ console.log("str: ", str);
+
+        const jsonContent = JSON.parse(str);
+ console.log("jsonContent: ", jsonContent);
+
+ // Send message to SQS queue
+ const sendMessageParams = {
+ QueueUrl: queueUrl,
+ MessageBody: jsonContent.results.transcripts[0].transcript,
+ MessageAttributes: {
+ AttributeName: {
+ StringValue: "Attribute Value",
+ DataType: "String",
+ },
+ },
+ };
+ await sqs.send(new SendMessageCommand(sendMessageParams));
+ } catch (err) {
+ console.error("Error getting object from S3 bucket: ", transcriptionBucket, err);
+ throw err;
+ }
+ });
+
+ await Promise.all(sqsSendMessagePromises);
+ callback(null, { message: 'Message sent successfully' });
+ message = "Message accepted!";
+ } catch (error) {
+ console.log(error);
+ message = error.message;
+ statusCode = 500;
+ }
+
+ return {
+ statusCode,
+ body: JSON.stringify({
+ message,
+ }),
+ };
+};
+
+// The function is triggered by the SQS queue.
+// A function is triggered whenever there is a new message in the queue.
+// When the function is triggered, it sends an email to 'sender@example.com'
+const consumer = async (event) => {
+ for (const record of event.Records) {
+ const params = {
+ Destination: {
+ ToAddresses: ["recipient@example.com"] // Email address/addresses that you want to send your email
+ },
+ Message: {
+ Body: {
+ Text: {
+ Charset: "UTF-8",
+ Data: `Hey there! Here is your generated transcribed file:\n${record.body}`
+ }
+ },
+ Subject: {
+ Charset: "UTF-8",
+ Data: "Test Email - JOB COMPLETED SUCCESSFULLY"
+ }
+ },
+ Source: "sender@example.com" // Sender email address
+ };
+
+ try {
+ // Send email to recipient
+ const result = await ses.send(new SendEmailCommand(params));
+ console.log("Email sent successfully: ", result);
+ } catch (error) {
+ console.error("Error sending email: ", error);
+ }
+ }
+};
+
+export { producer, consumer };
diff --git a/src/transcribe.js b/src/transcribe.js
deleted file mode 100644
index f2f5b24..0000000
--- a/src/transcribe.js
+++ /dev/null
@@ -1,40 +0,0 @@
-'use strict';
-
-const awsSdk = require('aws-sdk');
-
-const endpoint_url = process.env.AWS_ENDPOINT_URL
-const endpoint = new awsSdk.Endpoint(endpoint_url);
-const endpointConfig = new awsSdk.Config({
- endpoint: endpoint
-});
-
-const transcribeService = new awsSdk.TranscribeService(endpointConfig);
-
-// This function is triggered by an S3 event.
-// It starts a transcription job given an audio file.
-module.exports.process = (event, context, callback) => {
- const records = event.Records;
- console.log("Processing records: ", records);
-
- const transcribingPromises = records.map((record) => {
- const TranscriptionJobName = record.s3.object.key;
-
- // Start Transcription Job
- return transcribeService.startTranscriptionJob({
- LanguageCode: process.env.LANGUAGE_CODE,
- Media: {
- MediaFileUri: `s3://${process.env.S3_AUDIO_BUCKET}/${record.s3.object.key}` // s3 media file uri
- },
- MediaFormat: 'wav',
- TranscriptionJobName,
- MediaSampleRateHertz: 8000, // normally 8000 if you are using wav file
- OutputBucketName: process.env.S3_TRANSCRIPTION_BUCKET, // s3 bucket to store the transcription result
- }).promise();
- });
-
- Promise.all(transcribingPromises)
- .then(() => {
- callback(null, { message: 'Start transcription job successfully' });
- })
- .catch(err => callback(err, { message: 'Error start transcription job' }));
-};
diff --git a/src/transcribe.mjs b/src/transcribe.mjs
new file mode 100644
index 0000000..b8b1045
--- /dev/null
+++ b/src/transcribe.mjs
@@ -0,0 +1,52 @@
+'use strict';
+
+import { Transcribe, StartTranscriptionJobCommand } from "@aws-sdk/client-transcribe";
+
+const endpoint_url = process.env.AWS_ENDPOINT_URL
+// The raw endpoint URL string is passed directly to the client config below.
+const endpointConfig = {
+ endpoint: endpoint_url,
+ credentials: {
+ accessKeyId:'test',
+ secretAccessKey:'test'
+},
+};
+
+const transcribeClient = new Transcribe(endpointConfig);
+
+// This function is triggered by an S3 event.
+// It starts a transcription job given an audio file.
+export const transcribe_process = async (event, context, callback) => {
+ const records = event.Records;
+ console.log("Processing records: ", records);
+
+ const transcribingPromises = records.map(async (record) => {
+ const TranscriptionJobName = record.s3.object.key;
+
+ // Start Transcription Job
+ try {
+ const command = new StartTranscriptionJobCommand({
+ LanguageCode: process.env.LANGUAGE_CODE,
+ Media: {
+ MediaFileUri: `s3://${process.env.S3_AUDIO_BUCKET}/${record.s3.object.key}` // s3 media file uri
+ },
+ MediaFormat: 'wav',
+ TranscriptionJobName: TranscriptionJobName,
+ MediaSampleRateHertz: 8000, // normally 8000 if you are using wav file
+ OutputBucketName: process.env.S3_TRANSCRIPTION_BUCKET, // s3 bucket to store the transcription result
+ });
+ console.log('Starting transcription job');
+ await transcribeClient.send(command);
+ } catch (err) {
+ console.log('Error starting transcription job', err);
+ throw err;
+ }
+ });
+
+ try {
+ await Promise.all(transcribingPromises);
+ callback(null, { message: 'Start transcription job successfully' });
+ } catch (err) {
+ callback(err, { message: 'Error start transcription job' });
+ }
+};
diff --git a/src/upload.js b/src/upload.mjs
similarity index 98%
rename from src/upload.js
rename to src/upload.mjs
index fb86155..afdef56 100644
--- a/src/upload.js
+++ b/src/upload.mjs
@@ -1,6 +1,6 @@
// This function is triggered by an HTTP request using the GET method.
// This function returns a HTML page with a form to upload a file to S3 and a list of files in the S3 bucket.
-exports.get = async (event) => {
+export const get = async (event) => {
const html = `