update

parent 9174b540fd
commit c0faaf1599

README.md | 21
@@ -29,25 +29,4 @@ REDIS_PREFIX
# Database number used by the Redis connection
REDIS_DB

# PostgreSQL host
DB_HOST

# PostgreSQL port
DB_PORT

# PostgreSQL user
DB_USER

# PostgreSQL password
DB_PASSWORD

# PostgreSQL database name
DB_NAME

# PostgreSQL connection pool: maximum number of connections
DB_POOL_MAX

# PostgreSQL connection pool: number of idle connections
DB_POOL_MIN

```

@@ -1,114 +0,0 @@
const pg = require('pg');
const fs = require('fs');
const path = require('path');

const config = require('src/config/index.js');

// schema file name format ######_name.sql
const schemaDir = path.resolve(__dirname, '..', 'schemas');

const db = new pg.Client({
  host: config.database.host,
  port: config.database.port,
  user: config.database.user,
  password: config.database.password,
  database: config.database.dbname,
});

(async () => {
  await db.connect();

  await db.query(`select now();`);

  let version = -1;

  // check migrate record table exists
  const checkTable = await db.query(
    `
    select exists(
      select 1
      from "information_schema"."tables"
      where
        "table_schema" = $1
        and "table_name" = $2
    ) as exists
    `,
    ['public', 'migrate_log']
  );

  if (checkTable.rowCount > 0 && checkTable.rows[0].exists === true) {
    // version table exists
    const maxVersion = await db.query(`select max("version")::integer as version from "public"."migrate_log"`);
    if (maxVersion.rowCount > 0 && maxVersion.rows[0] && maxVersion.rows[0].version !== null) version = maxVersion.rows[0].version; // eslint-disable-line
  } else {
    // create version table
    await db.query(`create table "public"."migrate_log" (
      "version" integer not null primary key,
      "created_time" timestamptz not null default now()
    );`);
  }

  console.info(`Database Now Version: ${version}`);

  // read all schema files
  const schemaList = await fs.promises.readdir(schemaDir);

  /**
   * @type {{[x: number]: boolean}}
   */
  const checkDuplicate = {};

  /**
   * @type {{version: number, filename: string}[]}
   */
  const versionList = schemaList
    .map(file => {
      const strs = file.split('_');
      const v = parseInt(strs[0], 10);
      if (isNaN(v)) throw new Error(`schema filename format error (######_name.sql)`); // eslint-disable-line

      if (v in checkDuplicate) throw new Error(`schema file version (${v}) is duplicate`);

      checkDuplicate[v] = true;

      return { version: v, filename: file };
    })
    .filter(t => t && t.version > version)
    .sort((a, b) => a.version - b.version);

  // no schema files need to be applied
  if (versionList.length === 0) return;

  await db.query('begin');

  try {
    const vers = [];
    // write all schema files
    for (const it of versionList) {
      vers.push(`(${it.version})`);

      console.info(`Write Version: ${it.version}`);

      const fileContent = await fs.promises.readFile(path.resolve(schemaDir, it.filename), 'utf-8');

      await db.query(fileContent);
    }

    await db.query(`insert into "public"."migrate_log" ("version") values ${vers.join(',')}`);

    await db.query('commit');
  } catch (err) {
    await db.query('rollback');
    throw err;
  }
})()
  .then(() => {
    console.info('Database Migrate Finish');
  })
  .catch(err => {
    console.error('Database Migrate Failed, ', err);
    process.exit(1);
  })
  .finally(() => {
    db.end();
  });
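The removed runner above derives each file's version from its name and records every applied version in `migrate_log` with a single batched insert. A hypothetical illustration of the statement it builds, using invented filenames:

```js
// Hypothetical illustration of the removed runner's bookkeeping; filenames are invented examples.
const files = ['000001_init.sql', '000002_add_account.sql'];
const vers = files.map((f) => `(${parseInt(f.split('_')[0], 10)})`);
console.log(`insert into "public"."migrate_log" ("version") values ${vers.join(',')}`);
// -> insert into "public"."migrate_log" ("version") values (1),(2)
```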
@@ -1,58 +0,0 @@
const fs = require('fs');
const path = require('path');
const readline = require('readline');
const { padLeft } = require('src/utils/index.js');

const schemaDir = path.resolve(__dirname, '..', 'schemas');

(async () => {
  const args = process.argv.slice(2);
  let filename = args[0] || '';
  if (args.length === 0) {
    // use readline
    filename = await new Promise(resolve => {
      const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
      });

      rl.prompt();
      rl.question('schema filename: ', ans => {
        resolve(ans.replace(' ', '_'));

        rl.close();
      });

      rl.once('close', resolve);
    });
  }

  if (filename === '') throw new Error('no schema filename');

  const schemaFiles = await fs.promises.readdir(schemaDir);
  let version = 0;

  schemaFiles.forEach(name => {
    if (!name.endsWith('.sql')) return;

    const strInt = name.split(/_/g)[0];
    const v = parseInt(strInt, 10);
    if (isNaN(v)) return; // eslint-disable-line

    if (v > version) version = v;
  });

  // the new version is the latest existing version plus one
  version += 1;

  const schemaName = `${padLeft(`${version}`, 6, '0')}_${filename}.sql`;

  const schemaText = `-- Created Time ${new Date().toISOString()}`;

  await fs.promises.writeFile(path.resolve(schemaDir, schemaName), schemaText, 'utf-8');

  console.info(`File: ${path.resolve(schemaDir, schemaName)} Created!`);
})().catch(err => {
  console.error(err);
  process.exit(1);
});
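The removed tool above names each new schema file with a six-digit, zero-padded version prefix (the `######_name.sql` format the migration runner expects). A small sketch of the result, assuming `padLeft` from `src/utils/index.js` left-pads a string with the given character; the filename itself is an invented example:

```js
// Sketch of the generated filename; this padLeft is a stand-in for the project's src/utils helper.
const padLeft = (s, len, ch) => s.padStart(len, ch);
const version = 4; // one higher than the largest existing version prefix
const schemaName = `${padLeft(`${version}`, 6, '0')}_add-report-index.sql`;
console.log(schemaName); // -> 000004_add-report-index.sql
```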
@@ -2,37 +2,16 @@ const { env } = process;

module.exports = {
  server: {
    url: env.SERVER_URL || 'http://localhost:10230',
    cms_api_url: env.SERVER_CMS_API_URL || 'http://localhost:10230',
    url: env.SERVER_URL || "http://localhost:10230",
    port: parseInt(env.SERVER_PORT, 10) || 10230,
    jwt_secret: env.SERVER_JWT_SECRET || 'testsecret',
    jwt_secret: env.SERVER_JWT_SECRET || "testsecret",
    jwt_expire: parseInt(env.SERVER_JWT_EXPIRE, 10) || 60 * 60 * 24 * 30, // 30 days
    tos_url: env.SERVER_TOS_URL || 'http://localhost:10230',
    course_contract_url: env.SERVER_COURSE_CONTRACT_URL || 'http://localhost:10230',
    cms_limit_enabled: env.SERVER_CMS_LIMIT_ENABLED !== '0', // enable the CMS routing restriction
    cms_limit_token: env.SERVER_CMS_LIMIT_TOKEN || '',
  },
  redis: {
    host: env.REDIS_HOST || 'localhost',
    port: parseInt(env.REDIS_PORT, 10) || 6379,
    password: env.REDIS_PASSWORD || '',
    prefix: env.REDIS_PREFIX || '',
    db: parseInt(env.REDIS_DB, 10) || 0,
  },
  sso: {
    authorized_endpoint: env.SSO_AUTHORIZED_ENDPOINT || '',
    token_endpoint: env.SSO_TOKEN_ENDPOINT || '',
    logout_endpoint: env.SSO_LOGOUT_ENDPOINT || '',
    client_id: env.SSO_CLIENT_ID || '',
    client_secret: env.SSO_CLIENT_SECRET || '',
  },
  database: {
    host: env.DB_HOST || 'localhost',
    port: parseInt(env.DB_PORT, 10) || 5432,
    user: env.DB_USER || 'postgres',
    password: env.DB_PASSWORD || '',
    dbname: env.DB_NAME || 'professor_x',
    pool_max: parseInt(env.DB_POOL_MAX, 10) || 5,
    pool_min: parseInt(env.DB_POOL_MIN, 10) || 2,
  },
    authorized_endpoint: env.SSO_AUTHORIZED_ENDPOINT || "",
    token_endpoint: env.SSO_TOKEN_ENDPOINT || "",
    logout_endpoint: env.SSO_LOGOUT_ENDPOINT || "",
    client_id: env.SSO_CLIENT_ID || "",
    client_secret: env.SSO_CLIENT_SECRET || "",
  },
};

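Each numeric setting above falls back to its default when the variable is unset or not a number, because `parseInt` then returns `NaN` and `NaN || default` evaluates to the default. A minimal illustration:

```js
// Fallback behaviour of the numeric config defaults shown above.
const env = {}; // SERVER_PORT not set
const port = parseInt(env.SERVER_PORT, 10) || 10230; // NaN || 10230 -> 10230
console.log(port); // 10230
```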
@@ -2,9 +2,7 @@
const constants = {
  PAGE_SIZE: 20,
  OPENID_EXPIRE: 300, // 5 min
  INTERNAL_REGULATION_CACHE_TTL: 1800, // 30 min
  REPORT_CACHE_TTL: 600, // 10 min
  ALLOW_GROUP_ROLE: ['Ironman3']
  ALLOW_GROUP_ROLE: ["Ironman3"],
};

module.exports = constants;

@@ -1,14 +1,14 @@
const { resp } = require('src/utils/response/index.js');
const redis = require('src/utils/redis.js');
const sso = require('src/utils/sso/index.js');
const { OPENID_EXPIRE } = require('src/constants/index.js');
const uuid = require('uuid');
const url = require('url');
const { resp } = require("src/utils/response/index.js");
const { get: getCacheInstance } = require("src/utils/cache.js");
const sso = require("src/utils/sso/index.js");
const { OPENID_EXPIRE } = require("src/constants/index.js");
const uuid = require("uuid");
const url = require("url");

const controller = {};
module.exports = controller;

controller.loginSSO = () => async ctx => {
controller.loginSSO = () => async (ctx) => {
  const { back_url: backURL } = ctx.query;

  const state = uuid.v4();
@@ -16,17 +16,18 @@ controller.loginSSO = () => async ctx => {
  const authURL = sso.getAuthURL(state);

  // store back url to cache
  const cacheKey = redis.Key.ssoLoginCache(state);
  const cacheKey = `login-${state}`;
  const cache = getCacheInstance();

  await redis.set(cacheKey, JSON.stringify({ back_url: backURL }), 'EX', OPENID_EXPIRE);
  cache.set(cacheKey, JSON.stringify({ back_url: backURL }), true);

  const u = new url.URL(authURL);

  ctx.resp(resp.Success, { url: u.toString() });
};

controller.logout = () => async ctx => {
  let link = '';
controller.logout = () => async (ctx) => {
  let link = "";

  if (ctx.token.sso) {
    link = sso.getLogoutURL();
@@ -35,6 +36,6 @@ controller.logout = () => async ctx => {
  ctx.resp(resp.Success, { url: link });
};

controller.getInfo = () => async ctx => {
controller.getInfo = () => async (ctx) => {
  ctx.resp(resp.Success, {});
};
};

@@ -1,32 +1,33 @@
const debug = require('debug')('ctrl:common');
const util = require('util');
const url = require('url');
const sso = require('src/utils/sso/index.js');
const redis = require('src/utils/redis.js');
const { codeMessage, APIError } = require('src/utils/response/index.js');
const config = require('src/config/index.js');
const { jwt } = require('src/utils/pkgs.js');
const debug = require("debug")("ctrl:common");
const util = require("util");
const url = require("url");
const sso = require("src/utils/sso/index.js");
const { get: getCacheInstance } = require("src/utils/cache.js");
const { codeMessage, APIError } = require("src/utils/response/index.js");
const config = require("src/config/index.js");
const { jwt } = require("src/utils/pkgs.js");

const controller = {};
module.exports = controller;

controller.verifyCode = () => async ctx => {
controller.verifyCode = () => async (ctx) => {
  const { code, session_state: sessionState, state } = ctx.query;

  // logout flow redirect to frontend
  if (state === 'logout') {
  if (state === "logout") {
    ctx.redirect(config.server.frontend_url);
    return;
  }

  // get back url from redis
  const cacheKey = redis.Key.ssoLoginCache(state);
  const cacheKey = `login-${state}`;
  const cache = getCacheInstance();

  const data = await redis.get(cacheKey);
  if (!data) ctx.throw('get login cache fail');
  const data = cache.get(cacheKey);
  if (!data) ctx.throw("get login cache fail");
  const stateObj = JSON.parse(data);
  const { back_url: backURL } = stateObj;
  if (!backURL) ctx.throw('cache data missing');
  if (!backURL) ctx.throw("cache data missing");

  const u = new url.URL(backURL);

@@ -42,14 +43,17 @@ controller.verifyCode = () => async ctx => {
    config.server.jwt_secret,
    {
      expiresIn: config.server.jwt_expire,
      issuer: 'lawsnote',
      issuer: "lawsnote",
    }
  );

  u.searchParams.append('success', Buffer.from(JSON.stringify({ token: jwtToken })).toString('base64'));
  u.searchParams.append(
    "success",
    Buffer.from(JSON.stringify({ token: jwtToken })).toString("base64")
  );

  try {
    await redis.del(cacheKey);
    cache.del(cacheKey);
  } catch (err) {
    debug(`delete cache fail: ${util.inspect(err, false, null)}`);
  }
@@ -66,7 +70,10 @@ controller.verifyCode = () => async ctx => {

    errObj.errorStack = err.stack;
    errObj.errorMessage = err.message;
    u.searchParams.append('error', Buffer.from(JSON.stringify(errObj)).toString('base64'));
    u.searchParams.append(
      "error",
      Buffer.from(JSON.stringify(errObj)).toString("base64")
    );
  }

  ctx.redirect(u.toString());

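Taken together, the two controller changes above move the SSO login state from Redis into the new in-memory cache: `loginSSO` stores the back URL under `login-${state}`, and `verifyCode` later reads and deletes it. A condensed sketch of that round trip (state and URL values are illustrative):

```js
// Sketch of the login-state round trip through the new cache; values are illustrative.
const { get: getCacheInstance } = require("src/utils/cache.js");
const cache = getCacheInstance();

// loginSSO: remember where to send the user back to, keyed by the OAuth state
const state = "example-state"; // uuid.v4() in the real controller
cache.set(`login-${state}`, JSON.stringify({ back_url: "https://app.example.com" }), true);

// verifyCode: after the SSO provider redirects back with the same state
const data = cache.get(`login-${state}`);
const { back_url: backURL } = JSON.parse(data);
cache.del(`login-${state}`);
console.log(backURL); // -> https://app.example.com
```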
index.js | 8
@@ -1,9 +1,11 @@
require('dotenv').config();
require("dotenv").config();

const config = require('src/config/index.js');
const app = require('./server.js');
const config = require("src/config/index.js");
const { new: newCacheInstance } = require("src/utils/cache.js");
const app = require("./server.js");

async function runServer() {
  newCacheInstance();
  const server = app.listen(config.server.port, () => {
    // @ts-ignore
    console.info(`server start on port ${server.address().port}`);

@@ -1,25 +0,0 @@
const db = require('src/utils/database.js');

class Base {
  constructor() {
    this.cols = [];
    this.schema = 'public';
    this.table = '';
    this.db = db;
  }

  async transaction(trxFunc) {
    if (typeof trxFunc !== 'function') throw new Error('transaction function type error');
    return this.db.transaction(trxFunc);
  }

  async checkSchema() {
    await this.db
      .withSchema(this.schema)
      .from(this.table)
      .select(...this.cols)
      .limit(1);
  }
}

module.exports = Base;
@@ -1,14 +0,0 @@
/* eslint-disable func-names */
const Base = require('./base.js');

class Common extends Base {
  constructor() {
    super();
  }

  async test() {
    // nothing
  }
}

module.exports = Common;
@@ -1,25 +0,0 @@
// const debug = require('debug')('models:account');
const Base = require('src/model/base.js');

/**
 * @typedef AccountModel
 * @property {string} id
 * @property {string} phone
 * @property {string} password with bcrypt
 * @property {string} display_name
 * @property {string} secret
 * @property {string} created_time
 * @property {string} updated_time
 */

class Acconut extends Base {
  constructor() {
    super();
  }

  async test() {

  }
}

module.exports = Acconut;
package.json | 11
@@ -5,35 +5,26 @@
  "main": "index.js",
  "scripts": {
    "start": "node index.js",
    "migrate": "node bin/db-migrate.js",
    "test": "mocha --timeout 5000 --exit test/ && jest --passWithNoTests --runInBand --coverage .",
    "test-with-db": "npm run migrate && npm run test",
    "new-schema": "node bin/migrate-tool.js",
    "postinstall": "node -e \"var s='../',d='node_modules/src',fs=require('fs');fs.exists(d,function(e){e||fs.symlinkSync(s,d,'dir')});\""
  },
  "keywords": [],
  "author": "Jay <admin@trj.tw>",
  "license": "MIT",
  "dependencies": {
    "@google-cloud/storage": "5.4.0",
    "@koa/cors": "^3.0.0",
    "@koa/router": "^8.0.5",
    "@mtfos/swagger-generator": "git+https://github.com/otakukaze/swagger-generator.git#1.2.2",
    "axios": "0.21.0",
    "@mtfos/swagger-generator": "git+https://github.com/otakukaze/swagger-generator.git#1.4.1",
    "debug": "4.2.0",
    "dotenv": "^8.2.0",
    "got": "^11.8.2",
    "ioredis": "4.19.0",
    "joi": "17.3.0",
    "jsonwebtoken": "8.5.1",
    "knex": "0.21.15",
    "koa": "^2.11.0",
    "koa-body": "^4.1.1",
    "koa-logger": "^3.2.1",
    "koa-mount": "4.0.0",
    "koa-range": "0.3.0",
    "koa-static": "5.0.0",
    "pg": "8.4.1",
    "uuid": "8.3.1"
  },
  "devDependencies": {

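The `postinstall` one-liner above symlinks the project root into `node_modules/src`, which is what makes `require('src/...')` paths resolve throughout the codebase. Unfolded, the inlined script is roughly:

```js
// Readable equivalent of the postinstall one-liner above (behaviour inferred from that script).
var fs = require('fs');
var s = '../';                // project root, relative to the symlink location
var d = 'node_modules/src';
fs.exists(d, function (e) {
  if (!e) fs.symlinkSync(s, d, 'dir'); // create node_modules/src -> project root once
});
```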
@@ -1,50 +0,0 @@
--
-- PostgreSQL database dump
--

-- Dumped from database version 11.7
-- Dumped by pg_dump version 11.7

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: log; Type: SCHEMA; Schema: -; Owner: -
--

CREATE SCHEMA log;


--
-- Name: ltree; Type: EXTENSION; Schema: -; Owner: -
--

CREATE EXTENSION IF NOT EXISTS ltree WITH SCHEMA public;


--
-- Name: EXTENSION ltree; Type: COMMENT; Schema: -; Owner: -
--

COMMENT ON EXTENSION ltree IS 'data type for hierarchical tree-like structures';


--
-- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: -
--

CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;


--
-- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: -
--

COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions';
utils/cache.js | 48 (new file)
@@ -0,0 +1,48 @@
class Cache {
  constructor() {
    this.kv = {};
  }

  /**
   * @param {string} key
   * @param {string} value
   * @param {boolean?} noOverride
   */
  set(key, value, noOverride) {
    if (noOverride && key in this.kv) {
      throw new Error("key exists");
    }

    this.kv[key] = value;
  }

  /**
   * @param {string} key
   * @return {string?}
   */
  get(key) {
    return this.kv[key] || null;
  }

  /**
   * @param {string[]} keys
   */
  del(...keys) {
    for (const key of keys) {
      delete this.kv[key];
    }
  }
}

let cache = null;

exports.new = function () {
  if (cache) throw new Error("cache already initiate");
  cache = new Cache();
  return cache;
};

exports.get = function () {
  if (!cache) throw new Error("cache not initiate");
  return cache;
};
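This new in-memory module takes over the short-lived login state that was previously kept in Redis. A minimal usage sketch based only on the file above (the key and value are illustrative placeholders; the singleton is created once at startup in index.js):

```js
// Sketch of how the new cache is used; key and value are illustrative placeholders.
const { new: newCacheInstance, get: getCacheInstance } = require("src/utils/cache.js");

newCacheInstance(); // create the singleton once; throws if called again

const cache = getCacheInstance();
cache.set("login-example-state", JSON.stringify({ back_url: "https://example.com" }), true); // noOverride: throw if the key already exists
console.log(cache.get("login-example-state")); // the stored JSON string, or null when missing
cache.del("login-example-state"); // accepts one or more keys
```

Unlike the Redis `set(..., 'EX', OPENID_EXPIRE)` call it replaces, entries here have no expiry: they live until `del` is called or the process restarts.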
@@ -1,19 +0,0 @@
const knex = require('knex');
const config = require('src/config/index.js');

const pool = knex({
  client: 'pg',
  connection: {
    user: config.database.user,
    password: config.database.password,
    host: config.database.host,
    port: config.database.port,
    database: config.database.dbname,
  },
  pool: {
    max: config.database.pool_max,
    min: config.database.pool_min,
  },
});

module.exports = pool;
@@ -1,48 +0,0 @@
const IORedis = require("ioredis");
const config = require("src/config/index.js");

class Redis extends IORedis {
  constructor() {
    let { prefix } = config.redis;
    const { host, port, password, db } = config.redis;
    if (prefix && !/:$/.test(prefix)) prefix += ":";
    super({
      host,
      port,
      password,
      db,
    });

    this.prefix = prefix;

    const self = this;
    // key pattern functions
    this.Key = {
      /**
       * SSO login cache entry
       * @param {string} s state
       * @return {string}
       */
      ssoLoginCache: (s) => self.getKeyWithPrefix(`sso-login:${s}`),
      /**
       * stored user token
       * @param {string} s state
       * @return {string}
       */
      userToken: (s) => self.getKeyWithPrefix(`token:${s}`),
    };
  }

  /**
   * combine key and prefix
   * @param {string} s
   * @return {string}
   */
  getKeyWithPrefix(s) {
    if (typeof s !== "string") throw new Error("input key not a string");

    return `${this.prefix}${s}`;
  }
}

module.exports = new Redis();
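For comparison with the plain `login-${state}` keys used by the new cache, the removed Redis wrapper above namespaced every key with the configured prefix. A small illustration of the keys it produced (the prefix value is an invented example):

```js
// Illustration of the removed Redis key helpers; the prefix is an invented example value.
const prefix = "professor-x:"; // config.redis.prefix, normalized to end with ":"
const ssoLoginCache = (s) => `${prefix}sso-login:${s}`;
const userToken = (s) => `${prefix}token:${s}`;
console.log(ssoLoginCache("example-state")); // -> professor-x:sso-login:example-state
console.log(userToken("example-state"));     // -> professor-x:token:example-state
```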
@@ -1,9 +1,9 @@
const joi = require('joi');
const url = require('url');
const querystring = require('querystring');
const got = require('got');
const config = require('src/config/index.js');
const { jwt } = require('src/utils/pkgs.js');
const joi = require("joi");
const url = require("url");
const querystring = require("querystring");
const got = require("got");
const config = require("src/config/index.js");
const { jwt } = require("src/utils/pkgs.js");

const mod = {};
module.exports = mod;
@@ -11,14 +11,14 @@ module.exports = mod;
/**
 * @return {string}
 */
mod.getAuthURL = state => {
mod.getAuthURL = (state) => {
  const input = joi
    .object({
      authorized_endpoint: joi.string().required(),
      token_endpoint: joi.string().required(),
      client_id: joi.string().required(),
      client_secret: joi.string().required(),
      state: joi.string().allow('', null).default(''),
      state: joi.string().allow("", null).default(""),
    })
    .unknown()
    .validate({ ...config.sso, state });
@@ -29,12 +29,12 @@ mod.getAuthURL = state => {
   */
  const { value } = input;

  const redirectUri = new url.URL('/oauth/redirect', config.server.url);
  const redirectUri = new url.URL("/oauth/redirect", config.server.url);

  const qs = {
    client_id: value.client_id,
    scope: 'openid',
    response_type: 'code',
    scope: "offline_access",
    response_type: "code",
    redirect_uri: redirectUri.toString(),
  };
  if (value.state) qs.state = state;
@@ -53,13 +53,20 @@ mod.getLogoutURL = () => {
    .unknown()
    .validate({ ...config.sso });
  if (input.error) throw new Error(input.error.message);
  const redirectUri = new url.URL('/oauth/redirect', config.server.url);
  const redirectUri = new url.URL("/oauth/redirect", config.server.url);

  const qs = { state: 'logout', redirect_uri: redirectUri.toString() };
  const qs = { state: "logout", redirect_uri: redirectUri.toString() };

  return `${input.value.logout_endpoint}?${querystring.stringify(qs)}`;
};

mod.getUserInfo = async (token) => {
  const input = joi
    .object()
    .unknown()
    .validateAsync({ ...config.sso, token });
};

/**
 * @typedef SSOAccount
 * @property {string} access_token
@@ -90,7 +97,7 @@ mod.getToken = async (code, state) => {
   */
  const { value } = input;

  const redirectUri = new url.URL('/oauth/redirect', config.server.url);
  const redirectUri = new url.URL("/oauth/redirect", config.server.url);

  const qs = {
    client_id: value.client_id,
@@ -98,32 +105,42 @@
    redirect_uri: redirectUri.toString(),
    code: value.code,
    client_session_state: value.state,
    grant_type: 'authorization_code',
    grant_type: "authorization_code",
  };

  const resp = await got.default.post(value.token_endpoint, {
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      "Content-Type": "application/x-www-form-urlencoded",
    },
    body: querystring.stringify(qs),
    responseType: 'json',
    responseType: "json",
  });

  const { body } = resp;
  if (!body) throw new Error('resopnse body empty');
  if (!body) throw new Error("resopnse body empty");

  const { id_token: idToken, access_token: accessToken, refresh_token: refreshToken } = body;
  if (!idToken) throw new Error('get id token fail');
  const {
    id_token: idToken,
    access_token: accessToken,
    refresh_token: refreshToken,
  } = body;
  // if (!idToken) throw new Error("get id token fail");

  const decoded = jwt.decode(idToken);
  if (!decoded || typeof decoded !== 'object') throw new Error('jwt decode fail');
  console.log('decoded ::: ', decoded)
  console.log('body ::: ', body)
  // const decoded = jwt.decode(idToken);
  // if (!decoded || typeof decoded !== "object")
  //   throw new Error("jwt decode fail");
  // console.log("decoded ::: ", decoded);

  const decoded = jwt.decode(accessToken);
  // decode access token
  console.log("token ::: ", jwt.decode(accessToken));

  console.log("body ::: ", body);
  // @ts-ignore
  const { preferred_username: preferredUsername } = decoded;
  if (!preferredUsername) throw new Error('id token field missing');
  if (!preferredUsername) throw new Error("id token field missing");

  const displayName = `${decoded.family_name ?? ''}${decoded.given_name ?? ''}`;
  const displayName = `${decoded.family_name ?? ""}${decoded.given_name ?? ""}`;

  /** @type {SSOAccount} */
  const ssoAccount = {
@@ -132,7 +149,7 @@ mod.getToken = async (code, state) => {
    user_id: decoded.sub,
    username: preferredUsername.toLowerCase(),
    display_name: displayName ?? preferredUsername,
    email: decoded.email ?? '',
    email: decoded.email ?? "",
  };

  return ssoAccount;

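Putting the visible pieces of `getAuthURL` together, the redirect URI and query parameters end up looking roughly like this; the server URL is the config default, while the client id and state are invented placeholders:

```js
// Sketch of the query getAuthURL builds with the new "offline_access" scope; values are placeholders.
const url = require("url");
const querystring = require("querystring");

const serverUrl = "http://localhost:10230"; // config.server.url default
const redirectUri = new url.URL("/oauth/redirect", serverUrl);

const qs = {
  client_id: "my-client",        // config.sso.client_id (placeholder)
  scope: "offline_access",       // changed from "openid" in this commit
  response_type: "code",
  redirect_uri: redirectUri.toString(),
  state: "example-state",
};
console.log(querystring.stringify(qs));
// client_id=my-client&scope=offline_access&response_type=code&redirect_uri=http%3A%2F%2Flocalhost%3A10230%2Foauth%2Fredirect&state=example-state
```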