node.js - Why is my AWS S3 bucket creating a 0-byte file when uploading a file to a dynamically created folder?
I'm pretty new to AWS S3. I'm trying to upload files to different folders that are created dynamically. Even though the script works, my S3 bucket ends up with a 0-byte file whose name equals the folder of the last upload.
This is my server.js:
import express from 'express';
import path from 'path';
import webpack from 'webpack';
import webpackMiddleware from 'webpack-dev-middleware';
import webpackHotMiddleware from 'webpack-hot-middleware';
import bodyParser from 'body-parser';
import AWS from 'aws-sdk';
import open from 'open';
import uuid from 'uuid';
import webpackConfig from './webpack.config';

// Create the S3 client
AWS.config.loadFromPath('c.json');
var s3 = new AWS.S3();

let app = express();
app.use(bodyParser.json({ limit: '50mb' }));
app.use(express.static('public'));

const compiler = webpack(webpackConfig);
app.use(webpackMiddleware(compiler, {
  hot: true,
  publicPath: webpackConfig.output.publicPath,
  noInfo: true
}));
app.use(webpackHotMiddleware(compiler));

app.post('/api', (req, res) => {
  let imageUri = req.body.data.data_uri;
  let fileName = req.body.data.filename;
  let fileType = req.body.data.filetype;
  const name = uuid.v1();
  var bucketName = 'nok-sample/' + name;
  let buf = new Buffer(imageUri.replace(/^data:image\/\w+;base64,/, ''), 'base64');
  console.log(buf.length);

  // Create the bucket, then upload
  s3.createBucket({ Bucket: bucketName }, function () {
    var params = {
      Bucket: bucketName,
      Key: fileName,
      Body: buf,
      ContentType: fileType,
      ContentLength: buf.length,
      ACL: 'public-read'
    };
    s3.putObject(params, function (err, data) {
      if (err) console.log(err);
      else console.log('Successfully uploaded data ' + bucketName + '/' + fileName);
      let resImage = {
        awsUrl: `https://s3.amazonaws.com/${bucketName}/${fileName}`
      };
      res.json(resImage);
      console.log(resImage);
    });
  });
});

app.get('/*', (req, res) => {
  res.sendFile(path.join(__dirname, './index.html'));
});

app.listen(3000, () => {
  open('http://localhost:3000');
});
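For context: S3 bucket names may not contain slashes, and S3 has no real folders, so a "folder" is just a prefix in the object key (the console represents an empty folder as a 0-byte placeholder object). Passing 'nok-sample/' + name to createBucket is therefore not creating a new bucket, and is the likely source of the stray 0-byte object named after the last folder. Below is a minimal sketch of the key-prefix approach instead, assuming a single pre-existing bucket named nok-sample (taken from the code above), credentials in c.json, and the same { data: { data_uri, filename, filetype } } request payload; it is a sketch of that one handler, not a drop-in replacement for the whole server.

import express from 'express';
import bodyParser from 'body-parser';
import AWS from 'aws-sdk';
import uuid from 'uuid';

// Same credential loading as the question; 'nok-sample' is assumed to exist.
AWS.config.loadFromPath('c.json');
const s3 = new AWS.S3();

const app = express();
app.use(bodyParser.json({ limit: '50mb' }));

app.post('/api', (req, res) => {
  const { data_uri, filename, filetype } = req.body.data;
  const folder = uuid.v1(); // one "folder" (key prefix) per upload

  // Strip the data-URI header and decode the base64 payload.
  const buf = Buffer.from(data_uri.replace(/^data:image\/\w+;base64,/, ''), 'base64');

  const params = {
    Bucket: 'nok-sample',            // bucket names cannot contain '/'
    Key: `${folder}/${filename}`,    // the "folder" lives in the object key
    Body: buf,
    ContentType: filetype,
    ContentLength: buf.length,
    ACL: 'public-read'
  };

  s3.putObject(params, (err) => {
    if (err) return res.status(500).json({ error: err.message });
    res.json({ awsUrl: `https://s3.amazonaws.com/nok-sample/${folder}/${filename}` });
  });
});

app.listen(3000);

With this layout each upload appears under its own UUID prefix inside the one bucket, and no per-upload createBucket call is needed.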