diff --git a/Dockerfile b/Dockerfile
index 07b4cac..dfe62da 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -67,4 +67,5 @@ ENV PORT 3020
# server.js is created by next build from the standalone output
# https://nextjs.org/docs/pages/api-reference/next-config-js/output
# 启动服务
-CMD ["node", "server.js"]
\ No newline at end of file
+CMD ["node", "server.js"]
+# CMD ["sh", "-c", "node script/upload_doge.js && node server.js"]
\ No newline at end of file
diff --git a/elog-doge-upload.js b/elog-doge-upload.js
index a299013..1a1f95b 100644
--- a/elog-doge-upload.js
+++ b/elog-doge-upload.js
@@ -5,7 +5,6 @@ const {
} = require("@aws-sdk/client-s3");
// 节省体积,只引入 S3 服务(推荐)
const S3 = require("aws-sdk/clients/s3");
-// const dogecloudApi = require("./lib/doge-sdk"); // 请替换为正确的dogecloudApi函数路径
var axios = require("axios");
var crypto = require("crypto");
var querystring = require("querystring");
diff --git a/next.config.js b/next.config.js
index 5d90e55..d49e8e3 100644
--- a/next.config.js
+++ b/next.config.js
@@ -1,7 +1,10 @@
const { withContentlayer } = require("next-contentlayer2");
-
+const NextOSSPlugin = require("next-oss-webpack-plugin");
+const isProd = process.env.NODE_ENV === "production";
/** @type {import('next').NextConfig} */
const nextConfig = {
+ // 配置 CDN 地址
+ // assetPrefix: isProd ? "https://cdn.example.com/offernow/" : "",
swcMinify: true,
output: "standalone",
pageExtensions: ["ts", "tsx", "js", "jsx", "md", "mdx"],
@@ -36,11 +39,29 @@ const nextConfig = {
},
],
},
- webpack: (config) => {
+ webpack: (config, { buildId }) => {
config.module.rules.push({
test: /\.svg$/,
use: ["@svgr/webpack"],
});
+
+ // if (isProd && buildId) {
+ // config.plugins.push(
+ // new NextOSSPlugin({
+ // region: "oss-cn-hangzhou", // bucket所在区域
+ // accessKeyId: process.env.R2_ACCESSKEYID,
+ // accessKeySecret: process.env.R2_SECRET_ACCESSKEY,
+ // bucket: process.env.R2_BUCKET,
+
+ // filter: (assert) => /\.js$/.test(assert),
+ // assetPrefix: `${assetPrefix}/_next/`, // 上传资源前缀
+ // customizedOssPaths: [
+ // // 替换为 /:buildId/page/xxx.js ,使能正常访问
+ // { pattern: /bundles\/pages/g, replace: `${buildId}/page` },
+ // ],
+ // })
+ // );
+ // }
return config;
},
};
diff --git a/package.json b/package.json
index b909048..26340fa 100644
--- a/package.json
+++ b/package.json
@@ -36,6 +36,7 @@
"cross-env": "^7.0.3",
"crypto": "^1.0.1",
"dayjs": "^1.11.11",
+ "dotenv": "^16.4.5",
"embla-carousel-react": "^8.0.4",
"esbuild": "^0.21.1",
"flexsearch": "^0.7.43",
@@ -43,8 +44,10 @@
"hast-util-from-html-isomorphic": "^2.0.0",
"lucide-react": "^0.378.0",
"mdx": "^0.3.1",
+ "mime-types": "^2.1.35",
"next": "14.2.8",
"next-contentlayer2": "0.4.6",
+ "next-oss-webpack-plugin": "^1.0.0",
"next-themes": "^0.3.0",
"pliny": "^0.2.1",
"react": "^18.3.1",
@@ -53,6 +56,7 @@
"react-icons": "^4.12.0",
"react-markdown": "^9.0.1",
"react-syntax-highlighter": "^15.5.0",
+ "recursive-readdir": "^2.2.3",
"rehype-autolink-headings": "^7.1.0",
"rehype-citation": "^2.0.0",
"rehype-katex": "^7.0.0",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index a345473..27c602a 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -77,6 +77,9 @@ importers:
dayjs:
specifier: ^1.11.11
version: 1.11.13
+ dotenv:
+ specifier: ^16.4.5
+ version: 16.4.5
embla-carousel-react:
specifier: ^8.0.4
version: 8.3.0(react@18.3.1)
@@ -98,12 +101,18 @@ importers:
mdx:
specifier: ^0.3.1
version: 0.3.1
+ mime-types:
+ specifier: ^2.1.35
+ version: 2.1.35
next:
specifier: 14.2.8
version: 14.2.8(@babel/core@7.25.2)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next-contentlayer2:
specifier: 0.4.6
version: 0.4.6(contentlayer2@0.4.6(esbuild@0.21.5))(esbuild@0.21.5)(next@14.2.8(@babel/core@7.25.2)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+ next-oss-webpack-plugin:
+ specifier: ^1.0.0
+ version: 1.0.0(proxy-agent@5.0.0)
next-themes:
specifier: ^0.3.0
version: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -128,6 +137,9 @@ importers:
react-syntax-highlighter:
specifier: ^15.5.0
version: 15.5.0(react@18.3.1)
+ recursive-readdir:
+ specifier: ^2.2.3
+ version: 2.2.3
rehype-autolink-headings:
specifier: ^7.1.0
version: 7.1.0
@@ -2958,6 +2970,9 @@ packages:
resolution: {integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==}
hasBin: true
+ async@2.6.4:
+ resolution: {integrity: sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==}
+
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
@@ -5319,6 +5334,9 @@ packages:
react: '*'
react-dom: '*'
+ next-oss-webpack-plugin@1.0.0:
+ resolution: {integrity: sha512-uyH7r+YIWt5L7OgKfJKOefrXqpK8tvm7jlrJHT9cefNhksmxSx+AJ/LK707dR42SuDMdO8ssjPy+xaBB9phBgQ==}
+
next-themes@0.3.0:
resolution: {integrity: sha512-/QHIrsYpd6Kfk7xakK4svpDI5mmXP0gfvCoJdGpZQ2TOrQZmsW0QxjaiLn8wbIKjtm4BTSqLoix4lxYYOnLJ/w==}
peerDependencies:
@@ -5957,6 +5975,10 @@ packages:
resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==}
engines: {node: '>= 0.10'}
+ recursive-readdir@2.2.3:
+ resolution: {integrity: sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA==}
+ engines: {node: '>=6.0.0'}
+
redent@1.0.0:
resolution: {integrity: sha512-qtW5hKzGQZqKoh6JNSD+4lfitfPKGz42e6QwiRmPM5mmKtR0N41AbJRYu0xJi7nhOJ4WDgRkKvAk6tw4WIwR4g==}
engines: {node: '>=0.10.0'}
@@ -10716,6 +10738,10 @@ snapshots:
astring@1.9.0: {}
+ async@2.6.4:
+ dependencies:
+ lodash: 4.17.21
+
asynckit@0.4.0: {}
atomically@1.7.0: {}
@@ -13731,6 +13757,14 @@ snapshots:
- markdown-wasm
- supports-color
+ next-oss-webpack-plugin@1.0.0(proxy-agent@5.0.0):
+ dependencies:
+ ali-oss: 6.21.0(proxy-agent@5.0.0)
+ async: 2.6.4
+ transitivePeerDependencies:
+ - proxy-agent
+ - supports-color
+
next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
react: 18.3.1
@@ -14423,6 +14457,10 @@ snapshots:
dependencies:
resolve: 1.22.8
+ recursive-readdir@2.2.3:
+ dependencies:
+ minimatch: 3.1.2
+
redent@1.0.0:
dependencies:
indent-string: 2.1.0
diff --git a/public/sitemap.xml b/public/sitemap.xml
index 52411e8..9844697 100644
--- a/public/sitemap.xml
+++ b/public/sitemap.xml
@@ -976,7 +976,7 @@
https://offernow.cn/s/language/vue3/ebxce1ws74y0t8tq
weekly
0.5
- 2024-09-30T07:10:17.986Z
+ 2024-09-30T12:51:31.440Z
https://offernow.cn/s/language/vue3/ecpzrwpgo9ugf2bb
@@ -1774,7 +1774,7 @@
https://offernow.cn/s/cs_base/os/hand_os/vpcarog0vdzlbgx3
weekly
0.5
- 2024-09-30T07:10:17.988Z
+ 2024-09-30T12:51:31.443Z
https://offernow.cn/s/cs_base/os/hand_os/ho88cbpxgbc2gpnf
@@ -2710,7 +2710,7 @@
https://offernow.cn/s/interview/chat_view/20244/lgawhfx8pq2slyko
weekly
0.5
- 2024-09-30T07:10:17.991Z
+ 2024-09-30T12:51:31.446Z
https://offernow.cn/s/interview/chat_view/20244/iv993phgec0ybt41
diff --git a/script/auto-flush.py b/script/auto-flush.py
new file mode 100644
index 0000000..1aa72b4
--- /dev/null
+++ b/script/auto-flush.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# 自动刷新网站
+# 可以使用conjob 定时运行python代码,或者使用宝塔面板进行处理
+
+
+import requests
+
+# 定义 API 的 URL
+url = "https://serverless-api-elog.vercel.app/api/github"
+
+# 定义查询参数
+params = {
+ "user": "**",
+ "repo": "**",
+ "event_type": "sync",
+ "token": "**"
+}
+
+
+
+# 发送 GET 请求
+response = requests.get(url, params=params)
+
+# 检查请求是否成功
+if response.status_code == 200:
+ # 打印响应内容
+ print("Response JSON:")
+ print(response.json())
+else:
+ # 打印错误信息
+ print(f"Request failed with status code {response.status_code}")
+ print("Response text:")
+ print(response.text)
\ No newline at end of file
diff --git a/script/baidu-seo-flush.py b/script/baidu-seo-flush.py
new file mode 100644
index 0000000..255092a
--- /dev/null
+++ b/script/baidu-seo-flush.py
@@ -0,0 +1,108 @@
+import random
+import requests
+from bs4 import BeautifulSoup
+import time
+
+# 假设你的 sitemap 文件是一个 XML 文件,并且 URL 在 <loc> 标签中
+sitemap_url = 'https://offernow.cn/sitemap.xml'
+
+# 记录已处理URL的文件
+processed_urls_file = 'processed_urls.txt'
+
+def get_urls_from_sitemap(sitemap_url):
+ """从 sitemap 中提取所有的 URL"""
+ response = requests.get(sitemap_url)
+ soup = BeautifulSoup(response.content, 'xml')
+ urls = [loc.text for loc in soup.find_all('loc')]
+ return urls
+
+def submit_urls_to_baidu(urls, token):
+ """
+ 提交URL数组到百度的普通收录工具
+
+ 参数:
+ urls (list): 要提交的URL列表
+ site (str): 在搜索资源平台验证的站点
+ token (str): 在搜索资源平台申请的推送用的准入密钥
+
+ 返回:
+ dict: 返回百度接口的响应
+ """
+ api_url = f"http://data.zz.baidu.com/urls?site=https://offernow.cn&token={token}"
+ headers = {
+ 'Content-Type': 'text/plain'
+ }
+ data = "\n".join(urls)
+
+ response = requests.post(api_url, headers=headers, data=data)
+
+ if response.status_code == 200:
+ return response.json()
+ else:
+ print(f"Error: {response.status_code}")
+ return None
+
+def load_processed_urls():
+ """从文件中加载已处理的URL列表"""
+ try:
+ with open(processed_urls_file, 'r') as f:
+ return set(f.read().splitlines())
+ except FileNotFoundError:
+ return set()
+
+def save_processed_url(url):
+    """将已处理的URL保存到文件中"""
+    print("保存已处理的URL")
+ with open(processed_urls_file, 'a') as f:
+ f.write(url + '\n')
+ print(f"Saved URL: {url}") # 调试信息
+
+def clear_processed_urls():
+ """清空已处理的URL文件"""
+ with open(processed_urls_file, 'w') as f:
+ f.write('')
+
+# Access token: read from an environment variable; never commit the real token to the repo
+token = __import__("os").environ.get("BAIDU_PUSH_TOKEN", "")
+
+def main():
+ urls = get_urls_from_sitemap(sitemap_url)
+ random.shuffle(urls) # 随机排序URL列表
+ processed_urls = load_processed_urls()
+ num = 0
+
+ for url in urls:
+ if url in processed_urls:
+ print(f"URL {url} 已处理过,跳过...")
+ continue # 跳过已处理的URL
+
+ num += 1
+ curUrl = {url}
+ # 这里你可以按照你的需要处理每个页面的内容
+ start_time = time.time() # 请求前时间
+
+ response = submit_urls_to_baidu(curUrl, token)
+
+ elapsed_time = time.time() - start_time # 请求后时间
+ print(f"第{num}次,内容 {url},耗时 {elapsed_time:.2f} 秒:")
+
+ if response:
+ print(f"推送成功的URL条数: {response.get('success')}")
+ print(f"当天剩余可推送的URL条数: {response.get('remain')}")
+ print(f"不是本站的URL: {response.get('not_same_site')}")
+ print(f"不合法的URL: {response.get('not_valid')}")
+
+ save_processed_url(url) # 保存已处理的URL
+
+ if num >= 10:
+ break
+
+ # 检查是否所有URL都已处理完毕, 清空已处理的URL文件
+ # 通过比较两个集合的差异来判断是否所有URL都已处理完毕
+ # urls所有都在processed_urls中,才清空
+ if not set(urls) - processed_urls:
+ clear_processed_urls()
+ print("所有URL都已处理完毕,已清空已处理的URL文件。")
+
+if __name__ == '__main__':
+ main()
\ No newline at end of file
diff --git a/script/upload_doge.js b/script/upload_doge.js
new file mode 100644
index 0000000..0044371
--- /dev/null
+++ b/script/upload_doge.js
@@ -0,0 +1,191 @@
+require("dotenv").config({ path: ".env.local" });
+// 节省体积,只引入 S3 服务(推荐)
+const S3 = require("aws-sdk/clients/s3");
+const fs = require("fs");
+const readDir = require("recursive-readdir");
+const path = require("path");
+const mime = require("mime-types");
+const version = require("../package.json").version;
+
+var axios = require("axios");
+var crypto = require("crypto");
+var querystring = require("querystring");
+// Retrive all the files path in the build directory
+const getDirectoryFilesRecursive = (dir, ignores = []) => {
+ return new Promise((resolve, reject) => {
+ readDir(dir, ignores, (err, files) => (err ? reject(err) : resolve(files)));
+ });
+};
+
+// key 看起来会像这样: _next/public/<buildId>/pages/index.js
+// 其中 <buildId> 是每次 Next.js 部署时生成的 unique id
+// 参考: [https://nextjs.org/blog/next-7/#static-cdn-support](https://nextjs.org/blog/next-7/#static-cdn-support)
+const generateFileKey = (fileName, toReplace, replaced) => {
+ const S3objectPath = fileName.split(toReplace)[1];
+ console.log("S3objectPath", S3objectPath);
+
+ return version + replaced + S3objectPath;
+};
+
+/**
+ * 处理前缀,结尾自动加上 /
+ * (空值直接返回 "")
+ * @param {string} prefix
+ * @return {string} 处理后的前缀
+ */
+const formattedPrefix = (prefix) => {
+ // 如果没传,则默认为空
+ if (!prefix) return "";
+ let _prefix = prefix;
+ // 如果开头无需/
+ if (_prefix.startsWith("/")) {
+ _prefix = _prefix.slice(1);
+ }
+ // 如果结尾需要/
+ if (!_prefix.endsWith("/")) {
+ _prefix = `${_prefix}/`;
+ }
+ return _prefix;
+};
+
+const uploadToS3 = async (fileArray, toReplace, replaced) => {
+ try {
+ console.log("init start");
+ await dogecloudApi(
+ "/auth/tmp_token.json",
+ {
+ channel: "OSS_FULL",
+ scopes: ["*"],
+ },
+ true,
+ function (err, data) {
+ if (err) {
+ console.log(err.Error);
+ return;
+ }
+ try {
+ // 这里推荐使用 Redis 之类的缓存将获取到的临时密钥缓存下来,两小时内有效
+ const client = new S3({
+ region: "automatic",
+ endpoint: process.env.R2_ENDPOINT,
+ credentials: data.Credentials,
+ params: {
+ Bucket: process.env.R2_BUCKET,
+ },
+ });
+
+ console.log("init end");
+
+ fileArray.map(async (file) => {
+ // 配置s3对象参数
+ // const S3params = {
+ // Bucket: process.env.AWS_S3_BUCKET_NAME,
+ // Body: fs.createReadStream(file),
+ // Key: generateFileKey(file, toReplace, replaced),
+ // ACL: "public-read",
+ // ContentType: String(mime.lookup(file)),
+ // ContentEncoding: "utf-8",
+ // CacheControl: "immutable,max-age=31536000,public",
+ // };
+
+ const params = {
+ Bucket: process.env.R2_BUCKET,
+ Key: generateFileKey(file, toReplace, replaced),
+ Body: fs.createReadStream(file),
+ ACL: "public-read",
+ ContentType: String(mime.lookup(file)),
+ ContentEncoding: "utf-8",
+ };
+
+ console.log("uploading", params.Key);
+ try {
+ await client.putObject(params, function (err, data) {
+ // set the failure exit code only when this upload actually errored
+ if (err) { console.error("uploadImg", err, err.stack); process.exitCode = 1; }
+ // else console.error(data);
+ });
+ } catch (e) {
+ console.error("uploadImg", e, e.stack);
+ }
+ });
+ } catch (e) {
+ console.error("初始化出错", e.message);
+ }
+ }
+ );
+ } catch (e) {
+ console.error("init初始化出错", e.message);
+ }
+};
+
+function dogecloudApi(apiPath, data = {}, jsonMode = false, callback = null) {
+ // 这里替换为你的多吉云永久 AccessKey 和 SecretKey,可在用户中心 - 密钥管理中查看
+ // 请勿在客户端暴露 AccessKey 和 SecretKey,那样恶意用户将获得账号完全控制权
+ const accessKey = process.env.R2_ACCESSKEYID;
+ const secretKey = process.env.R2_SECRET_ACCESSKEY;
+
+ const body = jsonMode ? JSON.stringify(data) : querystring.encode(data);
+ const sign = crypto
+ .createHmac("sha1", secretKey)
+ .update(Buffer.from(apiPath + "\n" + body, "utf8"))
+ .digest("hex");
+ const authorization = "TOKEN " + accessKey + ":" + sign;
+
+ return new Promise(function (resolve, reject) {
+ try {
+ axios
+ .request({
+ url: "https://api.dogecloud.com" + apiPath,
+ method: "POST",
+ data: body,
+ responseType: "json",
+ headers: {
+ "Content-Type": jsonMode
+ ? "application/json"
+ : "application/x-www-form-urlencoded",
+ Authorization: authorization,
+ },
+ })
+ .then(function (response) {
+ if (response.data.code !== 200) {
+ // API 返回错误
+ callback
+ ? callback({ Error: "API Error: " + response.data.msg }, null)
+ : reject({
+ errno: response.data.code,
+ msg: "API Error: " + response.data.msg,
+ });
+ return;
+ }
+ callback
+ ? callback(null, response.data.data)
+ : resolve(response.data.data);
+ })
+ .catch(function (err) {
+ callback ? callback(err, null) : reject(err);
+ });
+ } catch (error) {
+ callback ? callback(error, null) : reject(error);
+ }
+ });
+}
+
+//递归获取文件
+const start = async function (dict) {
+ for (var i = 0; i < dict.length; i++) {
+ const files = await getDirectoryFilesRecursive(
+ path.resolve(__dirname, dict[i].filePath),
+ [".DS_Store", "BUILD_ID"]
+ );
+ uploadToS3(files, dict[i].toReplace, dict[i].replaced);
+ }
+};
+
+// 调用 start 方法
+start([
+ {
+ filePath: "../.next/static/",
+ toReplace: ".next/",
+ replaced: "/_next/",
+ },
+]);