Commit 6e6a962

Merge pull request #1333 from DIYgod/master
[pull] master from diygod:master

pull[bot] authored Apr 1, 2024
2 parents ef5bc1a + ac337c1 commit 6e6a962
Showing 18 changed files with 923 additions and 347 deletions.
19 changes: 0 additions & 19 deletions docker-compose.yml
@@ -14,7 +14,6 @@ services:
CACHE_TYPE: redis
REDIS_URL: 'redis://redis:6379/'
PUPPETEER_WS_ENDPOINT: 'ws://browserless:3000' # marked
PROXY_URI: 'socks5h://warp-socks:9091'
depends_on:
- redis
- browserless # marked
@@ -33,23 +32,5 @@ services:
volumes:
- redis-data:/data

warp-socks:
image: monius/docker-warp-socks:latest
privileged: true
restart: always
volumes:
- /lib/modules:/lib/modules
cap_add:
- NET_ADMIN
- SYS_MODULE
sysctls:
net.ipv6.conf.all.disable_ipv6: 0
net.ipv4.conf.all.src_valid_mark: 1
healthcheck:
test: ["CMD", "curl", "-f", "https://www.cloudflare.com/cdn-cgi/trace"]
interval: 30s
timeout: 10s
retries: 5

volumes:
redis-data:
2 changes: 1 addition & 1 deletion lib/config.ts
@@ -348,7 +348,7 @@ const calculateValue = () => {
allowOrigin: envs.ALLOW_ORIGIN,
// cache
cache: {
type: envs.CACHE_TYPE || 'memory', // cache type; supports 'memory' and 'redis', set to an empty value to disable caching
type: envs.CACHE_TYPE || (envs.CACHE_TYPE === '' ? '' : 'memory'), // cache type; supports 'memory' and 'redis', set to an empty value to disable caching
requestTimeout: toInt(envs.CACHE_REQUEST_TIMEOUT, 60),
routeExpire: toInt(envs.CACHE_EXPIRE, 5 * 60), // route cache expiry, in seconds
contentExpire: toInt(envs.CACHE_CONTENT_EXPIRE, 1 * 60 * 60), // cache expiry for unchanged content, in seconds
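
The changed line in lib/config.ts distinguishes an unset CACHE_TYPE from an explicitly empty one: with the old expression, an empty string fell through the || and caching silently came back as 'memory'. A minimal TypeScript sketch of the new fallback, not part of the commit, with resolveCacheType as a hypothetical name used only for illustration:

// Hypothetical helper mirroring the changed expression in lib/config.ts.
const resolveCacheType = (cacheTypeEnv: string | undefined): string => cacheTypeEnv || (cacheTypeEnv === '' ? '' : 'memory');

console.log(resolveCacheType(undefined)); // 'memory' -> unset falls back to the default
console.log(resolveCacheType(''));        // ''       -> explicitly empty now disables caching
console.log(resolveCacheType('redis'));   // 'redis'  -> explicit values pass through unchanged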
86 changes: 47 additions & 39 deletions lib/middleware/cache.test.ts
@@ -11,6 +11,24 @@ afterEach(() => {
vi.resetModules();
});

const noCacheTestFunc = async () => {
const app = (await import('@/app')).default;

const response1 = await app.request('/test/cache');
const response2 = await app.request('/test/cache');

const parsed1 = await parser.parseString(await response1.text());
const parsed2 = await parser.parseString(await response2.text());

expect(response2.status).toBe(200);
expect(response2.headers).not.toHaveProperty('rsshub-cache-status');

expect(parsed1.items[0].content).toBe('Cache1');
expect(parsed2.items[0].content).toBe('Cache2');

expect(parsed1.ttl).toEqual('1');
};

describe('cache', () => {
it('memory', async () => {
process.env.CACHE_TYPE = 'memory';
@@ -33,6 +51,8 @@ describe('cache', () => {
expect(response2.status).toBe(200);
expect(response2.headers.get('rsshub-cache-status')).toBe('HIT');

expect(parsed1.ttl).toEqual('1');

await wait(1 * 1000 + 100);
const response3 = await app.request('/test/cache');
expect(response3.headers).not.toHaveProperty('rsshub-cache-status');
@@ -81,6 +101,8 @@ describe('cache', () => {
expect(response2.status).toBe(200);
expect(response2.headers.get('rsshub-cache-status')).toBe('HIT');

expect(parsed1.ttl).toEqual('1');

await wait(1 * 1000 + 100);
const response3 = await app.request('/test/cache');
expect(response3.headers).not.toHaveProperty('rsshub-cache-status');
@@ -114,57 +136,25 @@ describe('cache', () => {
process.env.CACHE_TYPE = 'redis';
const cache = (await import('@/utils/cache')).default;
await cache.clients.redisClient!.quit();
const app = (await import('@/app')).default;

const response1 = await app.request('/test/cache');
const response2 = await app.request('/test/cache');

const parsed1 = await parser.parseString(await response1.text());
const parsed2 = await parser.parseString(await response2.text());

expect(response2.status).toBe(200);
expect(response2.headers).not.toHaveProperty('rsshub-cache-status');

expect(parsed1.items[0].content).toBe('Cache1');
expect(parsed2.items[0].content).toBe('Cache2');
await noCacheTestFunc();
});

it('redis with error', async () => {
process.env.CACHE_TYPE = 'redis';
process.env.REDIS_URL = 'redis://wrongpath:6379';
const app = (await import('@/app')).default;

const response1 = await app.request('/test/cache');
const response2 = await app.request('/test/cache');

const parsed1 = await parser.parseString(await response1.text());
const parsed2 = await parser.parseString(await response2.text());

expect(response2.status).toBe(200);
expect(response2.headers).not.toHaveProperty('rsshub-cache-status');

expect(parsed1.items[0].content).toBe('Cache1');
expect(parsed2.items[0].content).toBe('Cache2');

await noCacheTestFunc();
const cache = (await import('@/utils/cache')).default;
await cache.clients.redisClient!.quit();
});

it('no cache', async () => {
process.env.CACHE_TYPE = 'NO';
const app = (await import('@/app')).default;

const response1 = await app.request('/test/cache');
const response2 = await app.request('/test/cache');

const parsed1 = await parser.parseString(await response1.text());
const parsed2 = await parser.parseString(await response2.text());

expect(response2.status).toBe(200);
expect(response2.headers).not.toHaveProperty('rsshub-cache-status');
await noCacheTestFunc();
});

expect(parsed1.items[0].content).toBe('Cache1');
expect(parsed2.items[0].content).toBe('Cache2');
it('no cache (empty string)', async () => {
process.env.CACHE_TYPE = '';
await noCacheTestFunc();
});

it('throws URL key', async () => {
@@ -178,4 +168,22 @@
expect(error.message).toContain('Cache key must be a string');
}
});

it('RSS TTL (no cache)', async () => {
process.env.CACHE_TYPE = '';
process.env.CACHE_EXPIRE = '600';
const app = (await import('@/app')).default;
const response = await app.request('/test/cache');
const parsed = await parser.parseString(await response.text());
expect(parsed.ttl).toEqual('1');
});

it('RSS TTL (w/ cache)', async () => {
process.env.CACHE_TYPE = 'memory';
process.env.CACHE_EXPIRE = '600';
const app = (await import('@/app')).default;
const response = await app.request('/test/cache');
const parsed = await parser.parseString(await response.text());
expect(parsed.ttl).toEqual('10');
});
});
9 changes: 8 additions & 1 deletion lib/middleware/template.tsx
@@ -8,6 +8,13 @@ import { Data } from '@/types';
import { getCurrentPath } from '@/utils/helpers';
const __dirname = getCurrentPath(import.meta.url);

// Set RSS <ttl> (minute) according to the availability of cache
// * available: max(config.cache.routeExpire / 60, 1)
// * unavailable: 1
// The minimum <ttl> is limited to 1 minute to prevent potential misuse
import cacheModule from '@/utils/cache/index';
const ttl = (cacheModule.status.available && Math.trunc(config.cache.routeExpire / 60)) || 1;

const middleware: MiddlewareHandler = async (ctx, next) => {
await next();

@@ -74,7 +81,7 @@ const middleware: MiddlewareHandler = async (ctx, next) => {
const result = {
lastBuildDate: currentDate.toUTCString(),
updated: currentDate.toISOString(),
ttl: Math.trunc(config.cache.routeExpire / 60),
ttl,
atomlink: ctx.req.url,
...data,
};
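
The ttl constant introduced above floors at 1 minute and ignores routeExpire entirely when the cache backend is unavailable. A small standalone TypeScript sketch of the same expression, with the cache status and expiry passed in explicitly (the rssTtl name is illustrative, not part of the commit):

// Mirrors (cacheModule.status.available && Math.trunc(config.cache.routeExpire / 60)) || 1
const rssTtl = (cacheAvailable: boolean, routeExpireSeconds: number): number => (cacheAvailable && Math.trunc(routeExpireSeconds / 60)) || 1;

console.log(rssTtl(true, 600));  // 10 -> matches the 'RSS TTL (w/ cache)' test, CACHE_EXPIRE=600
console.log(rssTtl(false, 600)); // 1  -> matches the 'RSS TTL (no cache)' test
console.log(rssTtl(true, 30));   // 1  -> Math.trunc(0.5) is 0, so the 1-minute floor applies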
4 changes: 0 additions & 4 deletions lib/router.js
@@ -66,10 +66,6 @@ router.get('/konachan.net/post/popular_recent/:period', lazyloadRouteHandler('./
// router.get('/pornhub/:language?/model/:username/:sort?', lazyloadRouteHandler('./routes/pornhub/model'));
// router.get('/pornhub/:language?/pornstar/:username/:sort?', lazyloadRouteHandler('./routes/pornhub/pornstar'));

// yande.re
router.get('/yande.re/post/popular_recent', lazyloadRouteHandler('./routes/yande.re/post-popular-recent'));
router.get('/yande.re/post/popular_recent/:period', lazyloadRouteHandler('./routes/yande.re/post-popular-recent'));

// EZTV
router.get('/eztv/torrents/:imdb_id', lazyloadRouteHandler('./routes/eztv/imdb'));

2 changes: 1 addition & 1 deletion lib/routes-deprecated/index.js
@@ -4,7 +4,7 @@ const path = require('path');

let gitHash;
try {
gitHash = require('git-rev-sync').short();
gitHash = require('child_process').execSync('git rev-parse HEAD').toString().trim().slice(0, 7);
} catch {
gitHash = (process.env.HEROKU_SLUG_COMMIT && process.env.HEROKU_SLUG_COMMIT.slice(0, 7)) || (process.env.VERCEL_GIT_COMMIT_SHA && process.env.VERCEL_GIT_COMMIT_SHA.slice(0, 7)) || 'unknown';
}
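
The routes-deprecated change drops the git-rev-sync dependency in favour of a direct child_process call; taking the first 7 characters of the full SHA reproduces the usual short hash. A self-contained TypeScript sketch of that lookup, assuming the process runs inside a git checkout and simplifying the fallback (the real file checks the HEROKU_SLUG_COMMIT and VERCEL_GIT_COMMIT_SHA environment variables before giving up):

import { execSync } from 'node:child_process';

// Full SHA from git, trimmed and shortened to the conventional 7 characters;
// outside a git repository execSync throws and this sketch falls back to 'unknown'.
let gitHash: string;
try {
    gitHash = execSync('git rev-parse HEAD').toString().trim().slice(0, 7);
} catch {
    gitHash = 'unknown';
}
console.log(gitHash);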