mirror of https://github.com/filecoffee/filehost.git (synced 2024-11-13 19:49:56 +01:00)

Commit 3137cb01cc (parent 955d1aa4ca): new features & refactor
15 changed files with 5338 additions and 128 deletions
BIN .DS_Store (vendored, new file)
Binary file not shown.
.babelrc (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "presets": ["@babel/preset-env"]
}
(environment configuration file)
@@ -1,3 +1,7 @@
# Your host details
HOSTER_EMAIL=hoster@file.coffee
HOSTER_DOMAIN=https://file.coffee

# This can be 'local' or 's3'
STORAGE_MODE=local

@@ -19,6 +23,3 @@ API_KEYS=key1,key2,key3
# This is the maximum file size that can be uploaded and the max file name length. '-1' is unlimited file size, not recommended.
FILE_NAME_LENGTH=10
FILE_MAX_SIZE_MB=30

# Your email address
HOSTER_EMAIL=hoster@file.coffee
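Taken together with the variables referenced elsewhere in this commit (LOCAL_UPLOAD_PATH, the AWS credentials, S3_BUCKET_NAME, PUBLIC_UPLOAD_SIZE_LIMIT, ALLOW_PUBLIC_UPLOADS, FILEHOST_DOMAIN, PORT), the full configuration surface can be summarized in one place. The sketch below is not part of the commit; the variable names come from the diff, the comments and defaults are illustrative.

// Sketch only: the environment variables this commit appears to read, loaded via dotenv.
require("dotenv").config();

const config = {
  hosterEmail: process.env.HOSTER_EMAIL,                 // e.g. hoster@file.coffee
  hosterDomain: process.env.HOSTER_DOMAIN,               // e.g. https://file.coffee
  storageMode: process.env.STORAGE_MODE || "local",      // 'local' or 's3'
  apiKeys: (process.env.API_KEYS || "").split(","),      // key1,key2,key3
  fileNameLength: parseInt(process.env.FILE_NAME_LENGTH, 10) || 10,
  fileMaxSizeMB: parseInt(process.env.FILE_MAX_SIZE_MB, 10),          // -1 = unlimited
  publicUploadSizeLimit: parseInt(process.env.PUBLIC_UPLOAD_SIZE_LIMIT, 10),
  allowPublicUploads: process.env.ALLOW_PUBLIC_UPLOADS === "true",
  localUploadPath: process.env.LOCAL_UPLOAD_PATH,        // used when STORAGE_MODE=local
  awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID,         // used when STORAGE_MODE=s3
  awsSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  awsRegion: process.env.AWS_REGION,
  s3BucketName: process.env.S3_BUCKET_NAME,
  filehostDomain: process.env.FILEHOST_DOMAIN,           // optional public base URL
  port: process.env.PORT || 3000,
};

module.exports = config;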
.gitignore (vendored, 3 lines changed)
@@ -7,6 +7,9 @@ yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

/tests/uploads/*
!/tests/uploads/test.txt
!/tests/uploads/.gitkeep

/uploads/*
!/uploads/.gitkeep
controllers/file.controller.js (new file, 49 lines)
@@ -0,0 +1,49 @@
const initializeLocalStorage = require("../engines/local.engine");
const initializeS3Storage = require("../engines/s3.engine");

const storageMode = process.env.STORAGE_MODE || "local";
const fileNameLength = parseInt(process.env.FILE_NAME_LENGTH, 10) || 10;
const multerOptions = {
  limits: parseInt(process.env.FILE_MAX_SIZE_MB, 10) * 1024 * 1024,
};
const publicMulterOptions = {
  limits: parseInt(process.env.PUBLIC_UPLOAD_SIZE_LIMIT, 10) * 1024 * 1024,
};

let storageEngine;

if (storageMode === "local") {
  storageEngine = initializeLocalStorage(
    multerOptions,
    fileNameLength,
    process.env.LOCAL_UPLOAD_PATH,
  );
} else if (storageMode === "s3") {
  const s3Config = {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    region: process.env.AWS_REGION,
    bucketName: process.env.S3_BUCKET_NAME,
  };
  storageEngine = initializeS3Storage(multerOptions, fileNameLength, s3Config);
} else {
  throw new Error("Invalid STORAGE_MODE");
}

const uploadFile = (req, res) => {
  storageEngine.writeFile(req, res, () => {
    const fileHostDomain =
      process.env.FILEHOST_DOMAIN || `${req.protocol}://${req.get("host")}`;
    res.status(200).json({
      message: "File uploaded successfully",
      url: `${fileHostDomain}/u/${req.filePath}`,
    });
  });
};

const getFile = (req, res) => {
  const filename = req.params.filename;
  storageEngine.findFile(filename, res);
};

module.exports = { uploadFile, getFile };
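One thing to flag in file.controller.js: multer's `limits` option is an object of named limits (such as `fileSize`, in bytes), while `multerOptions` here assigns a bare number, which multer quietly ignores. If the intent matches the limit handling this same commit removes from index.js, the options would presumably look more like this sketch:

// Sketch only, mirroring the limit logic removed from index.js in this commit:
// multer expects `limits` to be an object such as { fileSize: <bytes> }.
const fileMaxSizeMB = parseInt(process.env.FILE_MAX_SIZE_MB, 10);
const multerOptions = {
  limits:
    fileMaxSizeMB === -1
      ? {} // -1 means "no limit", as in the previous index.js
      : { fileSize: fileMaxSizeMB * 1024 * 1024 },
};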
engines/local.engine.js (new file, 44 lines)
@@ -0,0 +1,44 @@
const multer = require("multer");
const path = require("path");
const mime = require("mime-types");
const fs = require("fs");
const { nanoid } = require("nanoid");

const initializeLocalStorage = (multerOptions, fileNameLength, uploadPath) => {
  const storage = multer.diskStorage({
    destination: (req, file, cb) => {
      cb(null, uploadPath);
    },
    filename: (req, file, cb) => {
      const ext = mime.extension(file.mimetype);
      const randomName = nanoid(fileNameLength);
      cb(null, `${randomName}.${ext}`);
    },
  });

  const upload = multer({ storage: storage, ...multerOptions });

  const writeFile = (req, res, next) => {
    upload.single("file")(req, res, (err) => {
      if (err) {
        return res.status(500).json({ error: err.message });
      }
      req.filePath = req.file.filename;
      next();
    });
  };

  const findFile = (filename, res) => {
    const filePath = path.join(uploadPath, filename);
    fs.access(filePath, fs.constants.F_OK, (err) => {
      if (err) {
        return res.status(404).json({ error: "File not found" });
      }
      res.sendFile(filePath);
    });
  };

  return { writeFile, findFile };
};

module.exports = initializeLocalStorage;
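Since `findFile` joins a request-supplied filename onto `uploadPath`, it may be worth guarding against path traversal (an encoded `../` can survive route matching). A minimal hardening sketch, assuming the intent is to serve only files that sit directly inside `uploadPath`:

// Sketch only: restrict lookups to plain file names inside uploadPath.
const findFile = (filename, res) => {
  const safeName = path.basename(filename); // drop any directory components
  const filePath = path.resolve(uploadPath, safeName);
  if (!filePath.startsWith(path.resolve(uploadPath) + path.sep)) {
    return res.status(400).json({ error: "Invalid file name" });
  }
  fs.access(filePath, fs.constants.F_OK, (err) => {
    if (err) {
      return res.status(404).json({ error: "File not found" });
    }
    res.sendFile(filePath);
  });
};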
engines/s3.engine.js (new file, 57 lines)
@@ -0,0 +1,57 @@
const multer = require("multer");
const mime = require("mime-types");
const AWS = require("aws-sdk");
const { nanoid } = require("nanoid");

const initializeS3Storage = (multerOptions, fileNameLength, s3Config) => {
  const s3 = new AWS.S3(s3Config);
  const storage = multer.memoryStorage();
  const upload = multer({ storage: storage, ...multerOptions });

  const writeFile = (req, res, next) => {
    upload.single("file")(req, res, (err) => {
      if (err) {
        return res.status(500).json({ error: err.message });
      }
      const ext = mime.extension(req.file.mimetype);
      const randomName = nanoid(fileNameLength);
      const params = {
        Bucket: s3Config.bucketName,
        Key: `${randomName}.${ext}`,
        Body: req.file.buffer,
        ContentType: req.file.mimetype,
      };

      s3.upload(params, (err, data) => {
        if (err) {
          return res.status(500).json({ error: err.message });
        }
        req.filePath = `${randomName}.${ext}`;
        next();
      });
    });
  };

  const findFile = (filename, res) => {
    const params = {
      Bucket: s3Config.bucketName,
      Key: filename,
    };

    s3.getObject(params, (err, data) => {
      if (err) {
        return res.status(404).json({ error: "File not found" });
      }
      res.writeHead(200, {
        "Content-Type": data.ContentType,
        "Content-Length": data.ContentLength,
      });
      res.write(data.Body);
      res.end();
    });
  };

  return { writeFile, findFile };
};

module.exports = initializeS3Storage;
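The callback form of `getObject` buffers the whole object in memory before writing it out. aws-sdk v2 can also stream the response; a possible variant of `findFile` (a sketch, not part of the commit, assuming headers have not been sent yet when an error fires):

// Sketch only: stream the S3 object to the response instead of buffering it.
const findFile = (filename, res) => {
  const params = { Bucket: s3Config.bucketName, Key: filename };
  const stream = s3.getObject(params).createReadStream();
  stream.on("error", () => res.status(404).json({ error: "File not found" }));
  stream.pipe(res);
};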
index.js (141 lines changed)
@@ -1,19 +1,12 @@
// index.js
require("dotenv").config();
const express = require("express");
const multer = require("multer");
const AWS = require("aws-sdk");
const fs = require("fs");
const path = require("path");
const mime = require("mime-types");
const ejs = require("ejs");
const fileRoutes = require("./routes/fileRoutes");

const app = express();
const port = process.env.PORT || 3000;
const storageMode = process.env.STORAGE_MODE || "local";
const apiKeys = process.env.API_KEYS.split(",");
const fileNameLength = parseInt(process.env.FILE_NAME_LENGTH, 10) || 10;
const fileMaxSizeMB = parseInt(process.env.FILE_MAX_SIZE_MB, 10);
const allowPublicUploads = process.env.ALLOW_PUBLIC_UPLOADS === "true";
const hosterEmail = process.env.HOSTER_EMAIL;

let totalUploads = 0;

@@ -24,116 +17,28 @@ app.set("view engine", "ejs");
const authenticate = (req, res, next) => {
  const apiKey = req.headers["x-api-key"];
  if (!apiKey || !apiKeys.includes(apiKey)) {
    return res.status(403).json({ error: "Forbidden" });
  }
  next();
};

const multerOptions = {
  limits: fileMaxSizeMB === -1 ? {} : { fileSize: fileMaxSizeMB * 1024 * 1024 },
};

let upload;

const initializeUpload = async () => {
  const { nanoid } = await import("nanoid");

  if (storageMode === "local") {
    const storage = multer.diskStorage({
      destination: (req, file, cb) => {
        cb(null, process.env.LOCAL_UPLOAD_PATH);
      },
      filename: (req, file, cb) => {
        const ext = mime.extension(file.mimetype);
        const randomName = nanoid(fileNameLength);
        cb(null, `${randomName}.${ext}`);
      },
    });
    upload = multer({ storage: storage, ...multerOptions });
  } else if (storageMode === "s3") {
    const s3 = new AWS.S3({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      region: process.env.AWS_REGION,
    });

    const storage = multer.memoryStorage();
    upload = multer({ storage: storage, ...multerOptions });

    app.post("/upload", authenticate, upload.single("file"), (req, res) => {
      const ext = mime.extension(req.file.mimetype);
      const randomName = nanoid(fileNameLength);
      const params = {
        Bucket: process.env.S3_BUCKET_NAME,
        Key: `${randomName}.${ext}`,
        Body: req.file.buffer,
        ContentType: req.file.mimetype,
      };

      s3.upload(params, (err, data) => {
        if (err) {
          return res.status(500).json({ error: err.message });
        }
        totalUploads++;
        totalSize += req.file.size / (1024 * 1024); // Convert bytes to MB
        res
          .status(200)
          .json({ message: "File uploaded successfully", url: data.Location });
      });
    });
    if (allowPublicUploads) {
      req.isPublicUpload = true;
      next();
    } else {
      return res.status(403).json({ error: "Forbidden" });
    }
  } else {
    throw new Error("Invalid STORAGE_MODE");
    next();
  }

  if (storageMode === "local") {
    app.post("/upload", authenticate, upload.single("file"), (req, res) => {
      totalUploads++;
      totalSize += req.file.size / (1024 * 1024); // Convert bytes to MB
      res
        .status(200)
        .json({ message: "File uploaded successfully", path: req.file.path });
    });

    app.get("/files/:filename", (req, res) => {
      const filePath = path.join(
        __dirname,
        process.env.LOCAL_UPLOAD_PATH,
        req.params.filename,
      );
      res.sendFile(filePath);
    });
  } else if (storageMode === "s3") {
    app.get("/files/:filename", (req, res) => {
      const params = {
        Bucket: process.env.S3_BUCKET_NAME,
        Key: req.params.filename,
      };

      s3.getObject(params, (err, data) => {
        if (err) {
          return res.status(500).json({ error: err.message });
        }
        res.writeHead(200, {
          "Content-Type": data.ContentType,
          "Content-Length": data.ContentLength,
        });
        res.write(data.Body);
        res.end();
      });
    });
  }

  app.get("/", (req, res) => {
    res.render("index", {
      totalUploads: totalUploads,
      totalSize: totalSize.toFixed(2), // Format to 2 decimal places
      hosterEmail: hosterEmail,
    });
  });

  app.listen(port, () => {
    console.log(`Server is running on port ${port}`);
  });
};

initializeUpload();
app.use(authenticate);
app.use(fileRoutes);

app.get("/", (req, res) => {
  res.render("index", {
    totalUploads: totalUploads,
    totalSize: totalSize.toFixed(2), // Format to 2 decimal places
    hosterEmail: hosterEmail,
  });
});

app.listen(port, () => {
  console.log(`Server is running on port ${port}`);
});
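Because `app.use(authenticate)` is registered before both `fileRoutes` and the `/` stats page in the refactored index.js, every route, including `GET /u/:filename`, now goes through the API-key/public-upload check. If only uploads are meant to be protected, one option is to scope the middleware to the upload route; the sketch below assumes that intent and is not part of the commit.

// Sketch only: apply the API-key check to uploads and leave GET /u/:filename public.
// `authenticate` is the middleware defined in index.js, passed in explicitly.
const express = require("express");
const { uploadFile, getFile } = require("../controllers/file.controller");

const buildFileRoutes = (authenticate) => {
  const router = express.Router();
  router.post("/upload", authenticate, uploadFile); // protected upload
  router.get("/u/:filename", getFile);              // public retrieval
  return router;
};

module.exports = buildFileRoutes;

index.js would then register `app.use(buildFileRoutes(authenticate))` instead of `app.use(authenticate)` followed by `app.use(fileRoutes)`.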
package-lock.json (generated, 5008 lines changed)
File diff suppressed because it is too large.
package.json (18 lines changed)
@@ -1,4 +1,13 @@
{
  "scripts": {
    "test": "jest"
  },
  "jest": {
    "transform": {
      "^.+\\.js$": "babel-jest"
    },
    "testEnvironment": "node"
  },
  "dependencies": {
    "aws-sdk": "^2.1632.0",
    "dotenv": "^16.4.5",

@@ -6,6 +15,13 @@
    "express": "^4.19.2",
    "mime-types": "^2.1.35",
    "multer": "^1.4.5-lts.1",
    "nanoid": "^5.0.7"
    "nanoid": "^3.3.7"
  },
  "devDependencies": {
    "@babel/core": "^7.24.6",
    "@babel/preset-env": "^7.24.6",
    "babel-jest": "^29.7.0",
    "jest": "^29.7.0",
    "supertest": "^7.0.0"
  }
}
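The dependency change from nanoid ^5.0.7 to ^3.3.7 lines up with the new engines requiring it synchronously: nanoid 4 and later are ESM-only, which is presumably why the pre-refactor index.js loaded it with a dynamic import. With v3 the CommonJS form used in the engines works directly:

// nanoid v3 ships a CommonJS build, so a plain require works in the engines:
const { nanoid } = require("nanoid");
console.log(nanoid(10)); // e.g. a 10-character random id

// nanoid v4+ is ESM-only; the old index.js worked around that with:
// const { nanoid } = await import("nanoid");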
routes/file.routes.js (new file, 9 lines)
@@ -0,0 +1,9 @@
const express = require("express");
const { uploadFile, getFile } = require("../controllers/file.controller");

const router = express.Router();

router.post("/upload", uploadFile);
router.get("/u/:filename", getFile);

module.exports = router;
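Note that index.js in this commit requires "./routes/fileRoutes" while the router added here lives at routes/file.routes.js. Assuming both are meant to be the same module, the require path and file name need to agree, for example:

// Sketch only: make the require in index.js match the new file name.
const fileRoutes = require("./routes/file.routes");
// ...or rename the file to routes/fileRoutes.js and keep the existing require.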
tests/local.test.js (new file, 58 lines)
@@ -0,0 +1,58 @@
const request = require("supertest");
const express = require("express");
const path = require("path");
const fs = require("fs");
const initializeLocalStorage = require("../engines/local.engine");
const { uploadFile, getFile } = require("../controllers/file.controller");

const app = express();
const uploadPath = path.join(__dirname, "uploads");
const multerOptions = { limits: { fileSize: 1024 * 1024 } }; // 1MB limit
const publicMulterOptions = { limits: { fileSize: 512 * 1024 } }; // 512KB limit
const fileNameLength = 10;

if (!fs.existsSync(uploadPath)) {
  fs.mkdirSync(uploadPath);
}

const storageEngine = initializeLocalStorage(
  multerOptions,
  fileNameLength,
  uploadPath,
);

app.post("/upload", (req, res) => {
  storageEngine.writeFile(req, res, () => {
    res.status(200).json({
      message: "File uploaded successfully",
      url: `http://localhost:3000/u/${req.filePath}`,
    });
  });
});

app.get("/u/:filename", (req, res) => {
  storageEngine.findFile(req.params.filename, res);
});

describe("Local Storage Engine", () => {
  it("should upload a file successfully", async () => {
    const response = await request(app)
      .post("/upload")
      .attach("file", Buffer.from("test file content"), "test.txt")
      .set("x-api-key", "valid-api-key");

    expect(response.status).toBe(200);
    expect(response.body.message).toBe("File uploaded successfully");
    expect(response.body.url).toMatch(/http:\/\/localhost:3000\/u\/.+\.txt/);
  });

  it("should retrieve a file successfully", async () => {
    const filePath = path.join(uploadPath, "test.txt");
    fs.writeFileSync(filePath, "test file content");

    const response = await request(app).get("/u/test.txt");

    expect(response.status).toBe(200);
    expect(response.text).toBe("test file content");
  });
});
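The .gitignore entries added in this commit keep tests/uploads/ empty apart from test.txt and .gitkeep, so the suite could also clean up the files it generates. A possible afterAll hook for this test file (a sketch, not part of the commit):

// Sketch only: remove files created during the run, keeping the fixtures.
afterAll(() => {
  for (const name of fs.readdirSync(uploadPath)) {
    if (name !== "test.txt" && name !== ".gitkeep") {
      fs.unlinkSync(path.join(uploadPath, name));
    }
  }
});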
tests/s3.test.js (new file, 68 lines)
@@ -0,0 +1,68 @@
const request = require("supertest");
const express = require("express");
const AWS = require("aws-sdk");
const initializeS3Storage = require("../engines/s3.engine");
const { uploadFile, getFile } = require("../controllers/file.controller");

const app = express();
const multerOptions = { limits: { fileSize: 1024 * 1024 } }; // 1MB limit
const publicMulterOptions = { limits: { fileSize: 512 * 1024 } }; // 512KB limit
const fileNameLength = 10;

const s3Config = {
  accessKeyId: "fake-access-key-id",
  secretAccessKey: "fake-secret-access-key",
  region: "fake-region",
  bucketName: "fake-bucket-name",
};

AWS.S3.prototype.upload = jest.fn((params, callback) => {
  callback(null, { Location: `https://fake-s3-url/${params.Key}` });
});

AWS.S3.prototype.getObject = jest.fn((params, callback) => {
  callback(null, {
    ContentType: "text/plain",
    ContentLength: 17,
    Body: Buffer.from("test file content"),
  });
});

const storageEngine = initializeS3Storage(
  multerOptions,
  fileNameLength,
  s3Config,
);

app.post("/upload", (req, res) => {
  storageEngine.writeFile(req, res, () => {
    res.status(200).json({
      message: "File uploaded successfully",
      url: `http://localhost:3000/u/${req.filePath}`,
    });
  });
});

app.get("/u/:filename", (req, res) => {
  storageEngine.findFile(req.params.filename, res);
});

describe("S3 Storage Engine", () => {
  it("should upload a file successfully", async () => {
    const response = await request(app)
      .post("/upload")
      .attach("file", Buffer.from("test file content"), "test.txt")
      .set("x-api-key", "valid-api-key");

    expect(response.status).toBe(200);
    expect(response.body.message).toBe("File uploaded successfully");
    expect(response.body.url).toMatch(/http:\/\/localhost:3000\/u\/.+\.txt/);
  });

  it("should retrieve a file successfully", async () => {
    const response = await request(app).get("/u/test.txt");

    expect(response.status).toBe(200);
    expect(response.text).toBe("test file content");
  });
});
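Patching AWS.S3.prototype directly works here because the engine constructs its client with `new AWS.S3(s3Config)`, but the overrides persist for the whole process. An equivalent setup with jest.spyOn can be restored automatically (a sketch under that assumption, not part of the commit):

// Sketch only: mock the same prototype methods via jest.spyOn so they can be restored.
jest
  .spyOn(AWS.S3.prototype, "upload")
  .mockImplementation((params, callback) =>
    callback(null, { Location: `https://fake-s3-url/${params.Key}` }),
  );

jest
  .spyOn(AWS.S3.prototype, "getObject")
  .mockImplementation((params, callback) =>
    callback(null, {
      ContentType: "text/plain",
      ContentLength: 17,
      Body: Buffer.from("test file content"),
    }),
  );

afterAll(() => jest.restoreAllMocks());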
tests/uploads/.gitkeep (new file, empty)
tests/uploads/test.txt (new file, 1 line)
@@ -0,0 +1 @@
test file content