Merge branch 'master' into 'main'
Master See merge request france/recherche-innovation/sud-ouest/tec/mia/partenariat_cortex/ihm!2
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1 +1,2 @@
|
|||||||
node_modules.zip
|
node_modules.zip
|
||||||
|
.DS_Store
|
||||||
114
app.js
114
app.js
@@ -4,10 +4,10 @@ const dotenv = require('dotenv')
|
|||||||
const app = express()
|
const app = express()
|
||||||
const path = require("path")
|
const path = require("path")
|
||||||
const publicDir = path.join(__dirname, './public')
|
const publicDir = path.join(__dirname, './public')
|
||||||
const bcrypt = require("bcryptjs")
|
|
||||||
const bodyParser = require('body-parser')
|
const bodyParser = require('body-parser')
|
||||||
const dateFormat = require('dateformat');
|
const functions = require(publicDir + "/functions");
|
||||||
const uuid = require('uuid');
|
const bcrypt = require("bcrypt")
|
||||||
|
const saltRounds = 10;
|
||||||
|
|
||||||
|
|
||||||
/* Connexion à la BDD MySQL */
|
/* Connexion à la BDD MySQL */
|
||||||
@@ -34,49 +34,20 @@ app.use(bodyParser.urlencoded({extended: false}));
|
|||||||
app.use(express.json())
|
app.use(express.json())
|
||||||
|
|
||||||
|
|
||||||
function hashPassword(plaintextPassword) {
|
async function recupListeSessions() {
|
||||||
bcrypt.hash(plaintextPassword, 10)
|
let today = functions.getNowDate("yyyymmdd");
|
||||||
.then(hash => {
|
|
||||||
return hash;
|
|
||||||
})
|
|
||||||
.catch(err => {
|
|
||||||
console.log(err)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function comparePassword(plaintextPassword, hash) {
|
// On récupère la liste des sessions actives et pour lesquelles il reste de la place
|
||||||
bcrypt.compare(plaintextPassword, hash)
|
db.query('SELECT ID, topic, DATE_FORMAT(scheduled_on, "%d/%m/%Y") as "date", DATE_FORMAT(scheduled_on, "%Hh%i") as "heure", IF(nb_of_attended-nb_of_participants=0, true, false) as "maxAtteint" FROM session WHERE DATE_FORMAT(scheduled_on, "%Y%m%d") >= ?', [today], async (error, result) => {
|
||||||
.then(result => {
|
if(error){ console.log(error); }
|
||||||
return result;
|
return result;
|
||||||
})
|
});
|
||||||
.catch(err => {
|
}
|
||||||
console.log(err)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function rollback(session, user) {
|
|
||||||
db.query('DELETE from participation WHERE user=? AND session=?', [user, session], (error, result) => {
|
|
||||||
if(error){ console.log(error) }
|
|
||||||
if(result && result.affectedRows > 0) {
|
|
||||||
db.query('DELETE from user WHERE id=?', [user], (error, result) => {
|
|
||||||
if(error){ console.log(error) }
|
|
||||||
if(result && result.affectedRows > 0) {
|
|
||||||
db.query('UPDATE session SET nb_of_participants = nb_of_participants - 1 WHERE ID=?', [session], (error, result) => {
|
|
||||||
if(error){ console.log(error) }
|
|
||||||
if(result && result.affectedRows > 0) {
|
|
||||||
console.log('Rollback effectué');
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// ******************************************* Arrivée sur la page d'accueil *******************************************
|
// ******************************************* Arrivée sur la page d'accueil *******************************************
|
||||||
app.get("/", (req, res) => {
|
app.get("/", (req, res) => {
|
||||||
let today = dateFormat(new Date(), "yyyymmdd");
|
let today = functions.getNowDate("yyyymmdd");
|
||||||
|
|
||||||
// On récupère la liste des sessions actives et pour lesquelles il reste de la place
|
// On récupère la liste des sessions actives et pour lesquelles il reste de la place
|
||||||
db.query('SELECT ID, topic, DATE_FORMAT(scheduled_on, "%d/%m/%Y") as "date", DATE_FORMAT(scheduled_on, "%Hh%i") as "heure", IF(nb_of_attended-nb_of_participants=0, true, false) as "maxAtteint" FROM session WHERE DATE_FORMAT(scheduled_on, "%Y%m%d") >= ?', [today], async (error, result) => {
|
db.query('SELECT ID, topic, DATE_FORMAT(scheduled_on, "%d/%m/%Y") as "date", DATE_FORMAT(scheduled_on, "%Hh%i") as "heure", IF(nb_of_attended-nb_of_participants=0, true, false) as "maxAtteint" FROM session WHERE DATE_FORMAT(scheduled_on, "%Y%m%d") >= ?', [today], async (error, result) => {
|
||||||
@@ -87,11 +58,11 @@ app.get("/", (req, res) => {
|
|||||||
else {
|
else {
|
||||||
res.render("login-session", { select: result} );
|
res.render("login-session", { select: result} );
|
||||||
}
|
}
|
||||||
})
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
app.get("/index", (req, res) => { res.render("index") });
|
app.get("/index", (req, res) => { res.render("index") });
|
||||||
app.get("/login", (req, res) => { res.render("login") });
|
app.get("/login", (req, res) => { res.render("login") });
|
||||||
app.get("/register", (req, res) => {
|
app.get("/register", (req, res) => {
|
||||||
res.render("register", { session: req.query.s, role: req.query.r})
|
res.render("register", { session: req.query.s, role: req.query.r})
|
||||||
});
|
});
|
||||||
@@ -140,8 +111,9 @@ app.post("/auth/register", (req, res) => {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
let hashedPassword = hashPassword(password);
|
bcrypt.genSalt(saltRounds, (err, salt) => {
|
||||||
db.query('INSERT INTO user SET?', {gender : (gender != undefined ? gender : ""), firstname: firstname, name: name, title: title, email: email, nickname : nickname, password: hashedPassword}, (err, result) => {
|
bcrypt.hash(password, salt, (err, hash) => {
|
||||||
|
db.query('INSERT INTO user SET?', {gender : (gender != undefined ? gender : ""), firstname: firstname, name: name, title: title, email: email, nickname : nickname, password: hash}, (err, result) => {
|
||||||
if(error) {
|
if(error) {
|
||||||
console.log(error)
|
console.log(error)
|
||||||
} else {
|
} else {
|
||||||
@@ -149,6 +121,8 @@ app.post("/auth/register", (req, res) => {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -171,7 +145,7 @@ app.post("/auth/check-login-no-security", (req, res) => {
|
|||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
// On crée l'utilisateur pour traçabilité
|
// On crée l'utilisateur pour traçabilité
|
||||||
var newUser = uuid.v4();
|
var newUser = functions.getUuid();
|
||||||
db.query('INSERT INTO user SET?', {id: newUser, nickname : nickname, session: session}, (error, result) => {
|
db.query('INSERT INTO user SET?', {id: newUser, nickname : nickname, session: session}, (error, result) => {
|
||||||
if(error){ console.log(error); rollback(session, newUser); }
|
if(error){ console.log(error); rollback(session, newUser); }
|
||||||
if(result && result.affectedRows > 0) {
|
if(result && result.affectedRows > 0) {
|
||||||
@@ -236,7 +210,7 @@ app.post("/auth/check-login", (req, res) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
userId = result[0].ID;
|
userId = result[0].ID;
|
||||||
if (comparePassword(password, result[0].password)) {
|
if (functions.comparePassword(password, result[0].password)) {
|
||||||
// Le user est connecté avec succès : on vérifie qu'il n'est pas déjà inscrit à la session et si pas le cas, on l'inscrit et on incrémente le compteur des participants
|
// Le user est connecté avec succès : on vérifie qu'il n'est pas déjà inscrit à la session et si pas le cas, on l'inscrit et on incrémente le compteur des participants
|
||||||
db.query('SELECT * FROM participation WHERE user = ?', [userId], async (error, result) => {
|
db.query('SELECT * FROM participation WHERE user = ?', [userId], async (error, result) => {
|
||||||
if(error) {
|
if(error) {
|
||||||
@@ -279,12 +253,10 @@ app.post("/auth/check-session", (req, res) => {
|
|||||||
|
|
||||||
const { session, session_password, role } = req.body
|
const { session, session_password, role } = req.body
|
||||||
let listeSessions;
|
let listeSessions;
|
||||||
let today = dateFormat(new Date(), "yyyymmdd");
|
let today = functions.getNowDate("yyyymmdd");
|
||||||
|
|
||||||
db.query('SELECT ID, topic, DATE_FORMAT(scheduled_on, "%d/%m/%Y") as "date", DATE_FORMAT(scheduled_on, "%Hh%i") as "heure", IF(nb_of_attended-nb_of_participants=0, true, false) as "maxAtteint" FROM session WHERE DATE_FORMAT(scheduled_on, "%Y%m%d") >= ?', [today], async (error, result) => {
|
db.query('SELECT ID, topic, DATE_FORMAT(scheduled_on, "%d/%m/%Y") as "date", DATE_FORMAT(scheduled_on, "%Hh%i") as "heure", IF(nb_of_attended-nb_of_participants=0, true, false) as "maxAtteint" FROM session WHERE DATE_FORMAT(scheduled_on, "%Y%m%d") >= ?', [today], async (error, result) => {
|
||||||
if(error){
|
if(error){ console.log(error); }
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
if (result.length == 0) {
|
if (result.length == 0) {
|
||||||
res.redirect('');
|
res.redirect('');
|
||||||
}
|
}
|
||||||
@@ -303,24 +275,54 @@ app.post("/auth/check-session", (req, res) => {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
if (result[0].password === session_password) {
|
let sessionDate = result[0].date;
|
||||||
return res.render('login', {"session": session, "role": role, "topic": result[0].topic, "session_date": result[0].date})
|
let sessionTopic = result[0].topic;
|
||||||
}
|
bcrypt.compare(session_password, result[0].password)
|
||||||
|
.then(result => {
|
||||||
|
if (result) { return res.render('login', {"session": session, "role": role, "topic": sessionTopic, "session_date": sessionDate}) }
|
||||||
else {
|
else {
|
||||||
return res.render('login-session', {
|
return res.render('login-session', {
|
||||||
"error": "Mot de passe incorrect : corriger votre saisie",
|
"error": "Mot de passe incorrect : corriger votre saisie",
|
||||||
"select": listeSessions
|
"select": listeSessions
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
console.log(err);
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
// ******************************************* Création d'une nouvelle session *******************************************
|
// ******************************************* Création d'une nouvelle session *******************************************
|
||||||
app.post("/record-session", (req, res) => {
|
app.post("/record-session", async (req, res) => {
|
||||||
const { topic, attended, password, } = req.body
|
const { topic, attended, password, password_confirm, datetimepicker1Input } = req.body
|
||||||
|
|
||||||
|
// On traite d'abord les motifs de rejet du formulaire.
|
||||||
|
if (isNaN(attended) || (!isNaN(attended) && attended < 0)) {
|
||||||
|
return res.render('create-session', {
|
||||||
|
error: "Le nombre de participants n'a pas une valeur correcte."
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if(password !== password_confirm) {
|
||||||
|
return res.render('create-session', {
|
||||||
|
error: 'Vos mots de passe ne correspondent pas'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
var newSessionId = functions.getUuid();
|
||||||
|
|
||||||
|
bcrypt.genSalt(saltRounds, (err, salt) => {
|
||||||
|
bcrypt.hash(password, salt, (err, hash) => {
|
||||||
|
db.query("INSERT INTO session (id, topic, password, nb_of_attended, scheduled_on) VALUES (?,?,?,?,STR_TO_DATE(?,'%d/%m/%Y %H:%i'))", [newSessionId, topic, hash, attended, datetimepicker1Input], (error, result) => {
|
||||||
|
if(error){ console.log(error); }
|
||||||
|
if(result && result.affectedRows > 0) {
|
||||||
|
res.redirect('/');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
1
node_modules/.bin/color-support
generated
vendored
Symbolic link
1
node_modules/.bin/color-support
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../color-support/bin.js
|
||||||
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
1
node_modules/.bin/mkdirp
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../mkdirp/bin/cmd.js
|
||||||
1
node_modules/.bin/node-pre-gyp
generated
vendored
Symbolic link
1
node_modules/.bin/node-pre-gyp
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../@mapbox/node-pre-gyp/bin/node-pre-gyp
|
||||||
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
1
node_modules/.bin/rimraf
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../rimraf/bin.js
|
||||||
502
node_modules/.package-lock.json
generated
vendored
502
node_modules/.package-lock.json
generated
vendored
@@ -4,22 +4,34 @@
|
|||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"node_modules/@fortawesome/fontawesome-common-types": {
|
"node_modules/@mapbox/node-pre-gyp": {
|
||||||
"version": "6.4.2",
|
"version": "1.0.11",
|
||||||
"resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.4.2.tgz",
|
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
|
||||||
"integrity": "sha512-1DgP7f+XQIJbLFCTX1V2QnxVmpLdKdzzo2k8EmvDOePfchaIGQ9eCHj2up3/jNEbZuBqel5OxiaOJf37TWauRA==",
|
"integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
|
||||||
"hasInstallScript": true,
|
"dependencies": {
|
||||||
"engines": {
|
"detect-libc": "^2.0.0",
|
||||||
"node": ">=6"
|
"https-proxy-agent": "^5.0.0",
|
||||||
|
"make-dir": "^3.1.0",
|
||||||
|
"node-fetch": "^2.6.7",
|
||||||
|
"nopt": "^5.0.0",
|
||||||
|
"npmlog": "^5.0.1",
|
||||||
|
"rimraf": "^3.0.2",
|
||||||
|
"semver": "^7.3.5",
|
||||||
|
"tar": "^6.1.11"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"node-pre-gyp": "bin/node-pre-gyp"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@fortawesome/free-solid-svg-icons": {
|
"node_modules/@mapbox/node-pre-gyp/node_modules/nopt": {
|
||||||
"version": "6.4.2",
|
"version": "5.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.4.2.tgz",
|
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
|
||||||
"integrity": "sha512-sYwXurXUEQS32fZz9hVCUUv/xu49PEJEyUOsA51l6PU/qVgfbTb2glsTEaJngVVT8VqBATRIdh7XVgV1JF1LkA==",
|
"integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
|
||||||
"hasInstallScript": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@fortawesome/fontawesome-common-types": "6.4.2"
|
"abbrev": "1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"nopt": "bin/nopt.js"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
@@ -52,6 +64,46 @@
|
|||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/agent-base": {
|
||||||
|
"version": "6.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||||
|
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
|
||||||
|
"dependencies": {
|
||||||
|
"debug": "4"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 6.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/agent-base/node_modules/debug": {
|
||||||
|
"version": "4.3.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||||
|
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||||
|
"dependencies": {
|
||||||
|
"ms": "2.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"supports-color": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/agent-base/node_modules/ms": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||||
|
},
|
||||||
|
"node_modules/ansi-regex": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/anymatch": {
|
"node_modules/anymatch": {
|
||||||
"version": "3.1.3",
|
"version": "3.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
|
||||||
@@ -64,6 +116,36 @@
|
|||||||
"node": ">= 8"
|
"node": ">= 8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/aproba": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ=="
|
||||||
|
},
|
||||||
|
"node_modules/are-we-there-yet": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
|
||||||
|
"dependencies": {
|
||||||
|
"delegates": "^1.0.0",
|
||||||
|
"readable-stream": "^3.6.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/are-we-there-yet/node_modules/readable-stream": {
|
||||||
|
"version": "3.6.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
|
||||||
|
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
|
||||||
|
"dependencies": {
|
||||||
|
"inherits": "^2.0.3",
|
||||||
|
"string_decoder": "^1.1.1",
|
||||||
|
"util-deprecate": "^1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 6"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/array-flatten": {
|
"node_modules/array-flatten": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||||
@@ -74,10 +156,18 @@
|
|||||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
|
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
|
||||||
},
|
},
|
||||||
"node_modules/bcryptjs": {
|
"node_modules/bcrypt": {
|
||||||
"version": "2.4.3",
|
"version": "5.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz",
|
"resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
|
||||||
"integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ=="
|
"integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"dependencies": {
|
||||||
|
"@mapbox/node-pre-gyp": "^1.0.11",
|
||||||
|
"node-addon-api": "^5.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 10.0.0"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"node_modules/bignumber.js": {
|
"node_modules/bignumber.js": {
|
||||||
"version": "9.0.0",
|
"version": "9.0.0",
|
||||||
@@ -203,11 +293,32 @@
|
|||||||
"fsevents": "~2.3.2"
|
"fsevents": "~2.3.2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/chownr": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/color-support": {
|
||||||
|
"version": "1.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
|
||||||
|
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
|
||||||
|
"bin": {
|
||||||
|
"color-support": "bin.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/concat-map": {
|
"node_modules/concat-map": {
|
||||||
"version": "0.0.1",
|
"version": "0.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||||
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
|
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
|
||||||
},
|
},
|
||||||
|
"node_modules/console-control-strings": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ=="
|
||||||
|
},
|
||||||
"node_modules/content-disposition": {
|
"node_modules/content-disposition": {
|
||||||
"version": "0.5.4",
|
"version": "0.5.4",
|
||||||
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
|
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
|
||||||
@@ -294,6 +405,11 @@
|
|||||||
"node": ">= 0.4"
|
"node": ">= 0.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/delegates": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ=="
|
||||||
|
},
|
||||||
"node_modules/depd": {
|
"node_modules/depd": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||||
@@ -311,6 +427,14 @@
|
|||||||
"npm": "1.2.8000 || >= 1.4.16"
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/detect-libc": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/dotenv": {
|
"node_modules/dotenv": {
|
||||||
"version": "16.3.1",
|
"version": "16.3.1",
|
||||||
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz",
|
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz",
|
||||||
@@ -327,6 +451,11 @@
|
|||||||
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
|
||||||
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
|
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
|
||||||
},
|
},
|
||||||
|
"node_modules/emoji-regex": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||||
|
},
|
||||||
"node_modules/encodeurl": {
|
"node_modules/encodeurl": {
|
||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
|
||||||
@@ -438,6 +567,33 @@
|
|||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/fs-minipass": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
|
||||||
|
"dependencies": {
|
||||||
|
"minipass": "^3.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fs-minipass/node_modules/minipass": {
|
||||||
|
"version": "3.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
|
||||||
|
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
|
||||||
|
"dependencies": {
|
||||||
|
"yallist": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fs.realpath": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||||
|
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
|
||||||
|
},
|
||||||
"node_modules/fsevents": {
|
"node_modules/fsevents": {
|
||||||
"version": "2.3.3",
|
"version": "2.3.3",
|
||||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||||
@@ -459,6 +615,25 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/gauge": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
|
||||||
|
"dependencies": {
|
||||||
|
"aproba": "^1.0.3 || ^2.0.0",
|
||||||
|
"color-support": "^1.1.2",
|
||||||
|
"console-control-strings": "^1.0.0",
|
||||||
|
"has-unicode": "^2.0.1",
|
||||||
|
"object-assign": "^4.1.1",
|
||||||
|
"signal-exit": "^3.0.0",
|
||||||
|
"string-width": "^4.2.3",
|
||||||
|
"strip-ansi": "^6.0.1",
|
||||||
|
"wide-align": "^1.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/get-intrinsic": {
|
"node_modules/get-intrinsic": {
|
||||||
"version": "1.2.2",
|
"version": "1.2.2",
|
||||||
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz",
|
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz",
|
||||||
@@ -473,6 +648,25 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/glob": {
|
||||||
|
"version": "7.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
|
||||||
|
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
|
||||||
|
"dependencies": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.1.1",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/glob-parent": {
|
"node_modules/glob-parent": {
|
||||||
"version": "5.1.2",
|
"version": "5.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
||||||
@@ -556,6 +750,11 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/has-unicode": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ=="
|
||||||
|
},
|
||||||
"node_modules/hasown": {
|
"node_modules/hasown": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz",
|
||||||
@@ -595,6 +794,39 @@
|
|||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/https-proxy-agent": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
|
||||||
|
"dependencies": {
|
||||||
|
"agent-base": "6",
|
||||||
|
"debug": "4"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/https-proxy-agent/node_modules/debug": {
|
||||||
|
"version": "4.3.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||||
|
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
|
||||||
|
"dependencies": {
|
||||||
|
"ms": "2.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"supports-color": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/https-proxy-agent/node_modules/ms": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||||
|
},
|
||||||
"node_modules/iconv-lite": {
|
"node_modules/iconv-lite": {
|
||||||
"version": "0.4.24",
|
"version": "0.4.24",
|
||||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
@@ -611,6 +843,15 @@
|
|||||||
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
|
||||||
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
|
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA=="
|
||||||
},
|
},
|
||||||
|
"node_modules/inflight": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
|
||||||
|
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
|
||||||
|
"dependencies": {
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/inherits": {
|
"node_modules/inherits": {
|
||||||
"version": "2.0.4",
|
"version": "2.0.4",
|
||||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||||
@@ -643,6 +884,14 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/is-fullwidth-code-point": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/is-glob": {
|
"node_modules/is-glob": {
|
||||||
"version": "4.0.3",
|
"version": "4.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||||
@@ -678,6 +927,28 @@
|
|||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/make-dir": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
|
||||||
|
"dependencies": {
|
||||||
|
"semver": "^6.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/make-dir/node_modules/semver": {
|
||||||
|
"version": "6.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||||
|
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
|
||||||
|
"bin": {
|
||||||
|
"semver": "bin/semver.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/media-typer": {
|
"node_modules/media-typer": {
|
||||||
"version": "0.3.0",
|
"version": "0.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||||
@@ -748,6 +1019,48 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/minipass": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/minizlib": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
|
||||||
|
"dependencies": {
|
||||||
|
"minipass": "^3.0.0",
|
||||||
|
"yallist": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/minizlib/node_modules/minipass": {
|
||||||
|
"version": "3.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
|
||||||
|
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
|
||||||
|
"dependencies": {
|
||||||
|
"yallist": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/mkdirp": {
|
||||||
|
"version": "1.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
|
||||||
|
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
|
||||||
|
"bin": {
|
||||||
|
"mkdirp": "bin/cmd.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/ms": {
|
"node_modules/ms": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||||
@@ -785,6 +1098,30 @@
|
|||||||
"resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
|
"resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
|
||||||
"integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="
|
"integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="
|
||||||
},
|
},
|
||||||
|
"node_modules/node-addon-api": {
|
||||||
|
"version": "5.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
|
||||||
|
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
|
||||||
|
},
|
||||||
|
"node_modules/node-fetch": {
|
||||||
|
"version": "2.7.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||||
|
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||||
|
"dependencies": {
|
||||||
|
"whatwg-url": "^5.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "4.x || >=6.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"encoding": "^0.1.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"encoding": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/nodemon": {
|
"node_modules/nodemon": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz",
|
||||||
@@ -847,6 +1184,25 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/npmlog": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
|
||||||
|
"dependencies": {
|
||||||
|
"are-we-there-yet": "^2.0.0",
|
||||||
|
"console-control-strings": "^1.1.0",
|
||||||
|
"gauge": "^3.0.0",
|
||||||
|
"set-blocking": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/object-assign": {
|
||||||
|
"version": "4.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||||
|
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/object-inspect": {
|
"node_modules/object-inspect": {
|
||||||
"version": "1.13.1",
|
"version": "1.13.1",
|
||||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
|
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
|
||||||
@@ -866,6 +1222,14 @@
|
|||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/once": {
|
||||||
|
"version": "1.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||||
|
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
|
||||||
|
"dependencies": {
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/parseurl": {
|
"node_modules/parseurl": {
|
||||||
"version": "1.3.3",
|
"version": "1.3.3",
|
||||||
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
|
||||||
@@ -874,6 +1238,14 @@
|
|||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/path-is-absolute": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
|
||||||
|
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/path-to-regexp": {
|
"node_modules/path-to-regexp": {
|
||||||
"version": "0.1.7",
|
"version": "0.1.7",
|
||||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
|
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
|
||||||
@@ -978,6 +1350,20 @@
|
|||||||
"node": ">=8.10.0"
|
"node": ">=8.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/rimraf": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
|
||||||
|
"dependencies": {
|
||||||
|
"glob": "^7.1.3"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"rimraf": "bin.js"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/safe-buffer": {
|
"node_modules/safe-buffer": {
|
||||||
"version": "5.2.1",
|
"version": "5.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
@@ -1058,6 +1444,11 @@
|
|||||||
"node": ">= 0.8.0"
|
"node": ">= 0.8.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/set-blocking": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
|
||||||
|
},
|
||||||
"node_modules/set-function-length": {
|
"node_modules/set-function-length": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz",
|
||||||
@@ -1090,6 +1481,11 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/signal-exit": {
|
||||||
|
"version": "3.0.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
||||||
|
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="
|
||||||
|
},
|
||||||
"node_modules/simple-update-notifier": {
|
"node_modules/simple-update-notifier": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
|
||||||
@@ -1138,6 +1534,30 @@
|
|||||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
|
||||||
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
|
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
|
||||||
},
|
},
|
||||||
|
"node_modules/string-width": {
|
||||||
|
"version": "4.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||||
|
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||||
|
"dependencies": {
|
||||||
|
"emoji-regex": "^8.0.0",
|
||||||
|
"is-fullwidth-code-point": "^3.0.0",
|
||||||
|
"strip-ansi": "^6.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/strip-ansi": {
|
||||||
|
"version": "6.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||||
|
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||||
|
"dependencies": {
|
||||||
|
"ansi-regex": "^5.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/supports-color": {
|
"node_modules/supports-color": {
|
||||||
"version": "5.5.0",
|
"version": "5.5.0",
|
||||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
|
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
|
||||||
@@ -1149,6 +1569,22 @@
|
|||||||
"node": ">=4"
|
"node": ">=4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tar": {
|
||||||
|
"version": "6.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz",
|
||||||
|
"integrity": "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==",
|
||||||
|
"dependencies": {
|
||||||
|
"chownr": "^2.0.0",
|
||||||
|
"fs-minipass": "^2.0.0",
|
||||||
|
"minipass": "^5.0.0",
|
||||||
|
"minizlib": "^2.1.1",
|
||||||
|
"mkdirp": "^1.0.3",
|
||||||
|
"yallist": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/to-regex-range": {
|
"node_modules/to-regex-range": {
|
||||||
"version": "5.0.1",
|
"version": "5.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
||||||
@@ -1179,6 +1615,11 @@
|
|||||||
"nodetouch": "bin/nodetouch.js"
|
"nodetouch": "bin/nodetouch.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tr46": {
|
||||||
|
"version": "0.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
|
||||||
|
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
|
||||||
|
},
|
||||||
"node_modules/type-is": {
|
"node_modules/type-is": {
|
||||||
"version": "1.6.18",
|
"version": "1.6.18",
|
||||||
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||||
@@ -1257,11 +1698,38 @@
|
|||||||
"foreachasync": "^3.0.0"
|
"foreachasync": "^3.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/webidl-conversions": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
|
||||||
|
},
|
||||||
|
"node_modules/whatwg-url": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
|
||||||
|
"dependencies": {
|
||||||
|
"tr46": "~0.0.3",
|
||||||
|
"webidl-conversions": "^3.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/wide-align": {
|
||||||
|
"version": "1.1.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
|
||||||
|
"integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
|
||||||
|
"dependencies": {
|
||||||
|
"string-width": "^1.0.2 || 2 || 3 || 4"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/wordwrap": {
|
"node_modules/wordwrap": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
||||||
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="
|
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="
|
||||||
},
|
},
|
||||||
|
"node_modules/wrappy": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||||
|
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
|
||||||
|
},
|
||||||
"node_modules/yallist": {
|
"node_modules/yallist": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
||||||
|
|||||||
74
node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml
generated
vendored
Normal file
74
node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# For most projects, this workflow file will not need changing; you simply need
|
||||||
|
# to commit it to your repository.
|
||||||
|
#
|
||||||
|
# You may wish to alter this file to override the set of languages analyzed,
|
||||||
|
# or to provide custom queries or build logic.
|
||||||
|
#
|
||||||
|
# ******** NOTE ********
|
||||||
|
# We have attempted to detect the languages in your repository. Please check
|
||||||
|
# the `language` matrix defined below to confirm you have the correct set of
|
||||||
|
# supported CodeQL languages.
|
||||||
|
#
|
||||||
|
name: "CodeQL"
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "master" ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [ "master" ]
|
||||||
|
schedule:
|
||||||
|
- cron: '24 5 * * 4'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Analyze
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read
|
||||||
|
contents: read
|
||||||
|
security-events: write
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
language: [ 'javascript' ]
|
||||||
|
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||||
|
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
# Initializes the CodeQL tools for scanning.
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
# By default, queries listed here will override any specified in a config file.
|
||||||
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
|
|
||||||
|
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||||
|
# queries: security-extended,security-and-quality
|
||||||
|
|
||||||
|
|
||||||
|
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
|
||||||
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
|
- name: Autobuild
|
||||||
|
uses: github/codeql-action/autobuild@v2
|
||||||
|
|
||||||
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
|
||||||
|
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||||
|
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||||
|
|
||||||
|
# - run: |
|
||||||
|
# echo "Run, Build Application using script"
|
||||||
|
# ./location_of_script_within_repo/buildscript.sh
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:${{matrix.language}}"
|
||||||
510
node_modules/@mapbox/node-pre-gyp/CHANGELOG.md
generated
vendored
Normal file
510
node_modules/@mapbox/node-pre-gyp/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,510 @@
|
|||||||
|
# node-pre-gyp changelog
|
||||||
|
|
||||||
|
## 1.0.11
|
||||||
|
- Fixes dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)
|
||||||
|
|
||||||
|
## 1.0.10
|
||||||
|
- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)
|
||||||
|
|
||||||
|
## 1.0.9
|
||||||
|
- Upgraded node-fetch to 2.6.7 to address [CVE-2022-0235](https://www.cve.org/CVERecord?id=CVE-2022-0235)
|
||||||
|
- Upgraded detect-libc to 2.0.0 to use non-blocking NodeJS(>=12) Report API
|
||||||
|
|
||||||
|
## 1.0.8
|
||||||
|
- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624)
|
||||||
|
|
||||||
|
## 1.0.7
|
||||||
|
- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw
|
||||||
|
|
||||||
|
## 1.0.6
|
||||||
|
- Added node v17 to the internal node releases listing
|
||||||
|
- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.01, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11)
|
||||||
|
- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590)
|
||||||
|
|
||||||
|
|
||||||
|
## 1.0.5
|
||||||
|
- Fix circular reference warning with node >= v14
|
||||||
|
|
||||||
|
## 1.0.4
|
||||||
|
- Added node v16 to the internal node releases listing
|
||||||
|
|
||||||
|
## 1.0.3
|
||||||
|
- Improved support configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571)
|
||||||
|
- New options added in https://github.com/mapbox/node-pre-gyp/pull/576: 'bucket', 'region', and `s3ForcePathStyle`
|
||||||
|
|
||||||
|
## 1.0.2
|
||||||
|
- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572)
|
||||||
|
|
||||||
|
## 1.0.1
|
||||||
|
- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31
|
||||||
|
|
||||||
|
## 1.0.0
|
||||||
|
- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated.
|
||||||
|
- New: support for staging and production s3 targets (see README.md)
|
||||||
|
- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands.
|
||||||
|
- Dropped node v6 support, added node v14 support
|
||||||
|
- Switched tests to use mapbox-owned bucket for testing
|
||||||
|
- Added coverage tracking and linting with eslint
|
||||||
|
- Added back support for symlinks inside the tarball
|
||||||
|
- Upgraded all test apps to N-API/node-addon-api
|
||||||
|
- New: support for staging and production s3 targets (see README.md)
|
||||||
|
- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default.
|
||||||
|
- Replaced needle with node-fetch
|
||||||
|
- Added proxy support for node-fetch
|
||||||
|
- Upgraded to mkdirp@1.x
|
||||||
|
|
||||||
|
## 0.17.0
|
||||||
|
- Got travis + appveyor green again
|
||||||
|
- Added support for more node versions
|
||||||
|
|
||||||
|
## 0.16.0
|
||||||
|
|
||||||
|
- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520)
|
||||||
|
|
||||||
|
## 0.15.0
|
||||||
|
|
||||||
|
- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492)
|
||||||
|
- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502)
|
||||||
|
- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501)
|
||||||
|
|
||||||
|
## 0.14.0
|
||||||
|
|
||||||
|
- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434)
|
||||||
|
- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454)
|
||||||
|
- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459)
|
||||||
|
- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483)
|
||||||
|
|
||||||
|
## 0.13.0
|
||||||
|
|
||||||
|
- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449)
|
||||||
|
|
||||||
|
## 0.12.0
|
||||||
|
|
||||||
|
- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428)
|
||||||
|
- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422)
|
||||||
|
|
||||||
|
## 0.11.0
|
||||||
|
|
||||||
|
- Fixed double-install problem with node v10
|
||||||
|
- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405)
|
||||||
|
|
||||||
|
## 0.10.3
|
||||||
|
|
||||||
|
- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. This should unbreak proxy support that regressed in v0.9.0
|
||||||
|
|
||||||
|
## 0.10.2
|
||||||
|
|
||||||
|
- Fixed rc/deep-extent security vulnerability
|
||||||
|
- Fixed broken reinstall script due to incorrectly named get_best_napi_version
|
||||||
|
|
||||||
|
## 0.10.1
|
||||||
|
|
||||||
|
- Fix needle error event (@medns)
|
||||||
|
|
||||||
|
## 0.10.0
|
||||||
|
|
||||||
|
- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371)
|
||||||
|
- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366)
|
||||||
|
- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372)
|
||||||
|
- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372)
|
||||||
|
- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14)
|
||||||
|
- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375)
|
||||||
|
|
||||||
|
## 0.9.1
|
||||||
|
|
||||||
|
- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361)
|
||||||
|
|
||||||
|
## 0.9.0
|
||||||
|
|
||||||
|
- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350)
|
||||||
|
|
||||||
|
## 0.8.0
|
||||||
|
|
||||||
|
- N-API support (@inspiredware)
|
||||||
|
|
||||||
|
## 0.7.1
|
||||||
|
|
||||||
|
- Upgraded to tar v4.x
|
||||||
|
|
||||||
|
## 0.7.0
|
||||||
|
|
||||||
|
- Updated request and hawk (#347)
|
||||||
|
- Dropped node v0.10.x support
|
||||||
|
|
||||||
|
## 0.6.40
|
||||||
|
|
||||||
|
- Improved error reporting if an install fails
|
||||||
|
|
||||||
|
## 0.6.39
|
||||||
|
|
||||||
|
- Support for node v9
|
||||||
|
- Support for versioning on `{libc}` to allow binaries to work on non-glibc linux systems like alpine linux
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.38
|
||||||
|
|
||||||
|
- Maintaining compatibility (for v0.6.x series) with node v0.10.x
|
||||||
|
|
||||||
|
## 0.6.37
|
||||||
|
|
||||||
|
- Solved one part of #276: we now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json
|
||||||
|
- Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291)
|
||||||
|
- Add new node versions to crosswalk
|
||||||
|
- Ported tests to use tape instead of mocha
|
||||||
|
- Got appveyor tests passing by downgrading npm and node-gyp
|
||||||
|
|
||||||
|
## 0.6.36
|
||||||
|
|
||||||
|
- Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directly.
|
||||||
|
- Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron)
|
||||||
|
|
||||||
|
## 0.6.35
|
||||||
|
|
||||||
|
- No longer recommending `npm ls` in `prepublish` (#291)
|
||||||
|
- Fixed testbinary command (#283) @szdavid92
|
||||||
|
|
||||||
|
## 0.6.34
|
||||||
|
|
||||||
|
- Added new node versions to crosswalk, including v8
|
||||||
|
- Upgraded deps to latest versions, started using `^` instead of `~` for all deps.
|
||||||
|
|
||||||
|
## 0.6.33
|
||||||
|
|
||||||
|
- Improved support for yarn
|
||||||
|
|
||||||
|
## 0.6.32
|
||||||
|
|
||||||
|
- Honor npm configuration for CA bundles (@heikkipora)
|
||||||
|
- Add node-pre-gyp and npm versions to user agent (@addaleax)
|
||||||
|
- Updated various deps
|
||||||
|
- Add known node version for v7.x
|
||||||
|
|
||||||
|
## 0.6.31
|
||||||
|
|
||||||
|
- Updated various deps
|
||||||
|
|
||||||
|
## 0.6.30
|
||||||
|
|
||||||
|
- Update to npmlog@4.x and semver@5.3.x
|
||||||
|
- Add known node version for v6.5.0
|
||||||
|
|
||||||
|
## 0.6.29
|
||||||
|
|
||||||
|
- Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0
|
||||||
|
|
||||||
|
## 0.6.28
|
||||||
|
|
||||||
|
- Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default and users need to know why builds are falling back to source compiles that might then error out.
|
||||||
|
|
||||||
|
## 0.6.27
|
||||||
|
|
||||||
|
- Add known node version for node v6
|
||||||
|
- Stopped bundling dependencies
|
||||||
|
- Documented method for module authors to avoid bundling node-pre-gyp
|
||||||
|
- See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details
|
||||||
|
|
||||||
|
## 0.6.26
|
||||||
|
|
||||||
|
- Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg
|
||||||
|
|
||||||
|
## 0.6.25
|
||||||
|
|
||||||
|
- Improved support for auto-detection of electron runtime in `node-pre-gyp.find()`
|
||||||
|
- Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187
|
||||||
|
- Add known node version for 4.4.1 and 5.9.1
|
||||||
|
|
||||||
|
## 0.6.24
|
||||||
|
|
||||||
|
- Add known node version for 5.8.0, 5.9.0, and 4.4.0.
|
||||||
|
|
||||||
|
## 0.6.23
|
||||||
|
|
||||||
|
- Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1.
|
||||||
|
|
||||||
|
## 0.6.22
|
||||||
|
|
||||||
|
- Add known node version for 4.3.1, and 5.7.0.
|
||||||
|
|
||||||
|
## 0.6.21
|
||||||
|
|
||||||
|
- Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0.
|
||||||
|
|
||||||
|
## 0.6.20
|
||||||
|
|
||||||
|
- Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1, and 5.5.0.
|
||||||
|
|
||||||
|
## 0.6.19
|
||||||
|
|
||||||
|
- Add known node version for 4.2.4
|
||||||
|
|
||||||
|
## 0.6.18
|
||||||
|
|
||||||
|
- Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x
|
||||||
|
|
||||||
|
## 0.6.17
|
||||||
|
|
||||||
|
- Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'`
|
||||||
|
|
||||||
|
## 0.6.16
|
||||||
|
|
||||||
|
- Added known version in crosswalk for 5.1.0.
|
||||||
|
|
||||||
|
## 0.6.15
|
||||||
|
|
||||||
|
- Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182)
|
||||||
|
- Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170)
|
||||||
|
- Added known version in crosswalk for 4.2.2.
|
||||||
|
|
||||||
|
## 0.6.14
|
||||||
|
|
||||||
|
- Added node 5.x version
|
||||||
|
|
||||||
|
## 0.6.13
|
||||||
|
|
||||||
|
- Added more known node 4.x versions
|
||||||
|
|
||||||
|
## 0.6.12
|
||||||
|
|
||||||
|
- Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz
|
||||||
|
|
||||||
|
## 0.6.11
|
||||||
|
|
||||||
|
- Added known node and io.js versions including more 3.x and 4.x versions
|
||||||
|
|
||||||
|
## 0.6.10
|
||||||
|
|
||||||
|
- Added known node and io.js versions including 3.x and 4.x versions
|
||||||
|
- Upgraded `tar` dep
|
||||||
|
|
||||||
|
## 0.6.9
|
||||||
|
|
||||||
|
- Upgraded `rc` dep
|
||||||
|
- Updated known io.js version: v2.4.0
|
||||||
|
|
||||||
|
## 0.6.8
|
||||||
|
|
||||||
|
- Upgraded `semver` and `rimraf` deps
|
||||||
|
- Updated known node and io.js versions
|
||||||
|
|
||||||
|
## 0.6.7
|
||||||
|
|
||||||
|
- Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94)
|
||||||
|
|
||||||
|
## 0.6.6
|
||||||
|
|
||||||
|
- Updated with known io.js 2.0.0 version
|
||||||
|
|
||||||
|
## 0.6.5
|
||||||
|
|
||||||
|
- Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887)
|
||||||
|
- Updated to semver@4.3.2
|
||||||
|
- Updated known node v0.12.x versions and io.js 1.x versions.
|
||||||
|
|
||||||
|
## 0.6.4
|
||||||
|
|
||||||
|
- Improved support for `io.js` (@fengmk2)
|
||||||
|
- Test coverage improvements (@mikemorris)
|
||||||
|
- Fixed support for `--dist-url` that regressed in 0.6.3
|
||||||
|
|
||||||
|
## 0.6.3
|
||||||
|
|
||||||
|
- Added support for passing raw options to node-gyp using `--` separator. Flags passed after the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed after the `--` will be passed directly to make/visual studio.
|
||||||
|
- Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly
|
||||||
|
- Fix issue with require validation not working on windows 7 (@edgarsilva)
|
||||||
|
|
||||||
|
## 0.6.2
|
||||||
|
|
||||||
|
- Support for io.js >= v1.0.2
|
||||||
|
- Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`.
|
||||||
|
|
||||||
|
## 0.6.1
|
||||||
|
|
||||||
|
- Fixed bundled `tar` version
|
||||||
|
|
||||||
|
## 0.6.0
|
||||||
|
|
||||||
|
- BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124)
|
||||||
|
- Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`.
|
||||||
|
- Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module and instead leaves it in place.
|
||||||
|
- Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped.
|
||||||
|
- Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version`
|
||||||
|
|
||||||
|
## 0.5.31
|
||||||
|
|
||||||
|
- Added support for deducing node_abi for node.js runtime from previous release if the series is even
|
||||||
|
- Added support for --target=0.10.33
|
||||||
|
|
||||||
|
## 0.5.30
|
||||||
|
|
||||||
|
- Repackaged with latest bundled deps
|
||||||
|
|
||||||
|
## 0.5.29
|
||||||
|
|
||||||
|
- Added support for semver `build`.
|
||||||
|
- Fixed support for downloading from urls that include `+`.
|
||||||
|
|
||||||
|
## 0.5.28
|
||||||
|
|
||||||
|
- Now reporting unix style paths only in reveal command
|
||||||
|
|
||||||
|
## 0.5.27
|
||||||
|
|
||||||
|
- Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo
|
||||||
|
- Fixed support for installing when path contains a `'` - @halfdan
|
||||||
|
- Ported tests to mocha
|
||||||
|
|
||||||
|
## 0.5.26
|
||||||
|
|
||||||
|
- Fix node-webkit support when `--target` option is not provided
|
||||||
|
|
||||||
|
## 0.5.25
|
||||||
|
|
||||||
|
- Fix bundling of deps
|
||||||
|
|
||||||
|
## 0.5.24
|
||||||
|
|
||||||
|
- Updated ABI crosswalk to include node v0.10.30 and v0.10.31
|
||||||
|
|
||||||
|
## 0.5.23
|
||||||
|
|
||||||
|
- Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value
|
||||||
|
- Added support for `--silent` (shortcut for `--loglevel=silent`)
|
||||||
|
|
||||||
|
## 0.5.22
|
||||||
|
|
||||||
|
- Fixed node-webkit versioning name (NOTE: node-webkit support still experimental)
|
||||||
|
|
||||||
|
## 0.5.21
|
||||||
|
|
||||||
|
- New package to fix `shasum check failed` error with v0.5.20
|
||||||
|
|
||||||
|
## 0.5.20
|
||||||
|
|
||||||
|
- Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90)
|
||||||
|
|
||||||
|
## 0.5.19
|
||||||
|
|
||||||
|
- Updated to know about more node-webkit releases
|
||||||
|
|
||||||
|
## 0.5.18
|
||||||
|
|
||||||
|
- Updated to know about more node-webkit releases
|
||||||
|
|
||||||
|
## 0.5.17
|
||||||
|
|
||||||
|
- Updated to know about node v0.10.29 release
|
||||||
|
|
||||||
|
## 0.5.16
|
||||||
|
|
||||||
|
- Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86)
|
||||||
|
|
||||||
|
## 0.5.15
|
||||||
|
|
||||||
|
- Fixed installation of windows packages sub directories on unix systems (#84)
|
||||||
|
|
||||||
|
## 0.5.14
|
||||||
|
|
||||||
|
- Finished support for cross building using `--target_platform` option (#82)
|
||||||
|
- Now skipping binary validation on install if target arch/platform do not match the host.
|
||||||
|
- Removed multi-arch validation for OS X since it required a FAT node.js binary
|
||||||
|
|
||||||
|
## 0.5.13
|
||||||
|
|
||||||
|
- Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver were bundled.
|
||||||
|
|
||||||
|
## 0.5.12
|
||||||
|
|
||||||
|
- Improved support for node-webkit (@Mithgol)
|
||||||
|
|
||||||
|
## 0.5.11
|
||||||
|
|
||||||
|
- Updated target versions listing
|
||||||
|
|
||||||
|
## 0.5.10
|
||||||
|
|
||||||
|
- Fixed handling of `-debug` flag passed directly to node-pre-gyp (#72)
|
||||||
|
- Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary
|
||||||
|
- Failed install due to `testbinary` check failure no longer leaves behind binary (#70)
|
||||||
|
|
||||||
|
## 0.5.9
|
||||||
|
|
||||||
|
- Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60)
|
||||||
|
|
||||||
|
## 0.5.8
|
||||||
|
|
||||||
|
- Started bundling deps
|
||||||
|
|
||||||
|
## 0.5.7
|
||||||
|
|
||||||
|
- Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63)
|
||||||
|
- Exposed the internal `testbinary` command in node-pre-gyp command line tool
|
||||||
|
- Fixed minor bug so that `fallback_to_build` option is always respected
|
||||||
|
|
||||||
|
## 0.5.6
|
||||||
|
|
||||||
|
- Added support for versioning on the `name` value in `package.json` (#57).
|
||||||
|
- Moved to using streams for reading tarball when publishing (#52)
|
||||||
|
|
||||||
|
## 0.5.5
|
||||||
|
|
||||||
|
- Improved binary validation that also now works with node-webkit (@Mithgol)
|
||||||
|
- Upgraded test apps to work with node v0.11.x
|
||||||
|
- Improved test coverage
|
||||||
|
|
||||||
|
## 0.5.4
|
||||||
|
|
||||||
|
- No longer depends on external install of node-gyp for compiling builds.
|
||||||
|
|
||||||
|
## 0.5.3
|
||||||
|
|
||||||
|
- Reverted fix for debian/nodejs since it broke windows (#45)
|
||||||
|
|
||||||
|
## 0.5.2
|
||||||
|
|
||||||
|
- Support for debian systems where the node binary is named `nodejs` (#45)
|
||||||
|
- Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates a .cmd automatically when globally installed)
|
||||||
|
- Updated abi-crosswalk with node v0.10.26 entry.
|
||||||
|
|
||||||
|
## 0.5.1
|
||||||
|
|
||||||
|
- Various minor bug fixes, several improving windows support for publishing.
|
||||||
|
|
||||||
|
## 0.5.0
|
||||||
|
|
||||||
|
- Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`.
|
||||||
|
- Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18).
|
||||||
|
- Added `remote_path` which also supports versioning.
|
||||||
|
- Changed `remote_uri` to `host`.
|
||||||
|
|
||||||
|
## 0.4.2
|
||||||
|
|
||||||
|
- Added support for `--target` flag to request cross-compile against a specific node/node-webkit version.
|
||||||
|
- Added preliminary support for node-webkit
|
||||||
|
- Fixed support for `--target_arch` option being respected in all cases.
|
||||||
|
|
||||||
|
## 0.4.1
|
||||||
|
|
||||||
|
- Fixed exception when only stderr is available in binary test (@bendi / #31)
|
||||||
|
|
||||||
|
## 0.4.0
|
||||||
|
|
||||||
|
- Enforce only `https:` based remote publishing access.
|
||||||
|
- Added `node-pre-gyp info` command to display listing of published binaries
|
||||||
|
- Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option.
|
||||||
|
- Added support for S3 prefixes.
|
||||||
|
|
||||||
|
## 0.3.1
|
||||||
|
|
||||||
|
- Added `unpublish` command.
|
||||||
|
- Fixed module path construction in tests.
|
||||||
|
- Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides the setting in a dependency's package.json `install` target.
|
||||||
|
|
||||||
|
## 0.3.0
|
||||||
|
|
||||||
|
- Support for packaging all files in `module_path` directory - see `app4` for example
|
||||||
|
- Added `testpackage` command.
|
||||||
|
- Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that.
|
||||||
|
- `.node` modules must be in a folder of their own since tar-pack will remove everything when it unpacks.
|
||||||
27
node_modules/@mapbox/node-pre-gyp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
Copyright (c), Mapbox
|
||||||
|
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
* Neither the name of node-pre-gyp nor the names of its contributors
|
||||||
|
may be used to endorse or promote products derived from this software
|
||||||
|
without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
|
||||||
|
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||||
|
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
742
node_modules/@mapbox/node-pre-gyp/README.md
generated
vendored
Normal file
@@ -0,0 +1,742 @@
|
|||||||
|
# @mapbox/node-pre-gyp
|
||||||
|
|
||||||
|
#### @mapbox/node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries
|
||||||
|
|
||||||
|
[](https://travis-ci.com/mapbox/node-pre-gyp)
|
||||||
|
[](https://ci.appveyor.com/project/Mapbox/node-pre-gyp)
|
||||||
|
|
||||||
|
`@mapbox/node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.
|
||||||
|
|
||||||
|
### Special note on previous package
|
||||||
|
|
||||||
|
On Feb 9th, 2021 `@mapbox/node-pre-gyp@1.0.0` was [released](./CHANGELOG.md). Older, unscoped versions that are not part of the `@mapbox` org are deprecated and only `@mapbox/node-pre-gyp` will see updates going forward. To upgrade to the new package do:
|
||||||
|
|
||||||
|
```
|
||||||
|
npm uninstall node-pre-gyp --save
|
||||||
|
npm install @mapbox/node-pre-gyp --save
|
||||||
|
```
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
|
||||||
|
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
|
||||||
|
- A JavaScript module that can dynamically require your installed binary: `require('@mapbox/node-pre-gyp').find`
|
||||||
|
|
||||||
|
For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki ](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.
|
||||||
|
|
||||||
|
## Credits
|
||||||
|
|
||||||
|
- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
|
||||||
|
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
|
||||||
|
- Development is sponsored by [Mapbox](https://www.mapbox.com/)
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
See the [Frequently Ask Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ).
|
||||||
|
|
||||||
|
## Depends
|
||||||
|
|
||||||
|
- Node.js >= v8.x
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like:
|
||||||
|
|
||||||
|
./node_modules/.bin/node-pre-gyp --help
|
||||||
|
|
||||||
|
But you can also install it globally:
|
||||||
|
|
||||||
|
npm install @mapbox/node-pre-gyp -g
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
View all possible commands:
|
||||||
|
|
||||||
|
node-pre-gyp --help
|
||||||
|
|
||||||
|
- clean - Remove the entire folder containing the compiled .node module
|
||||||
|
- install - Install pre-built binary for module
|
||||||
|
- reinstall - Run "clean" and "install" at once
|
||||||
|
- build - Compile the module by dispatching to node-gyp or nw-gyp
|
||||||
|
- rebuild - Run "clean" and "build" at once
|
||||||
|
- package - Pack binary into tarball
|
||||||
|
- testpackage - Test that the staged package is valid
|
||||||
|
- publish - Publish pre-built binary
|
||||||
|
- unpublish - Unpublish pre-built binary
|
||||||
|
- info - Fetch info on published binaries
|
||||||
|
|
||||||
|
You can also chain commands:
|
||||||
|
|
||||||
|
node-pre-gyp clean build unpublish publish info
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
Options include:
|
||||||
|
|
||||||
|
- `-C/--directory`: run the command in this directory
|
||||||
|
- `--build-from-source`: build from source instead of using pre-built binary
|
||||||
|
- `--update-binary`: reinstall by replacing previously installed local binary with remote binary
|
||||||
|
- `--runtime=node-webkit`: customize the runtime: `node`, `electron` and `node-webkit` are the valid options
|
||||||
|
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
|
||||||
|
- `--target=0.4.0`: Pass the target node or node-webkit version to compile against
|
||||||
|
- `--target_arch=ia32`: Pass the target arch and override the host `arch`. Any value that is [supported by Node.js](https://nodejs.org/api/os.html#osarch) is valid.
|
||||||
|
- `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`.
|
||||||
|
|
||||||
|
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
|
||||||
|
|
||||||
|
For example: `npm install --build-from-source=myapp`. This is useful if:
|
||||||
|
|
||||||
|
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`.
|
||||||
|
- The larger app also depends on other modules installed with `node-pre-gyp`
|
||||||
|
- You only want to trigger a source compile for `myapp` and not the other modules.
|
||||||
|
|
||||||
|
### Configuring
|
||||||
|
|
||||||
|
This is a guide to configuring your module to use node-pre-gyp.
|
||||||
|
|
||||||
|
#### 1) Add new entries to your `package.json`
|
||||||
|
|
||||||
|
- Add `@mapbox/node-pre-gyp` to `dependencies`
|
||||||
|
- Add `aws-sdk` as a `devDependency`
|
||||||
|
- Add a custom `install` script
|
||||||
|
- Declare a `binary` object
|
||||||
|
|
||||||
|
This looks like:
|
||||||
|
|
||||||
|
```js
|
||||||
|
"dependencies" : {
|
||||||
|
"@mapbox/node-pre-gyp": "1.x"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"aws-sdk": "2.x"
|
||||||
|
}
|
||||||
|
"scripts": {
|
||||||
|
"install": "node-pre-gyp install --fallback-to-build"
|
||||||
|
},
|
||||||
|
"binary": {
|
||||||
|
"module_name": "your_module",
|
||||||
|
"module_path": "./lib/binding/",
|
||||||
|
"host": "https://your_module.s3-us-west-1.amazonaws.com"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json).
|
||||||
|
|
||||||
|
Let's break this down:
|
||||||
|
|
||||||
|
- Dependencies need to list `node-pre-gyp`
|
||||||
|
- Your devDependencies should list `aws-sdk` so that you can run `node-pre-gyp publish` locally or on a CI system. We recommend using `devDependencies` only since `aws-sdk` is large and not needed for `node-pre-gyp install`, which only uses http to fetch binaries
|
||||||
|
- Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly.
|
||||||
|
- Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below.
|
||||||
|
|
||||||
|
Note: in the past we recommended putting `@mapbox/node-pre-gyp` in the `bundledDependencies`, but we no longer recommend this. In the past there were npm bugs (with node versions 0.10.x) that could lead to node-pre-gyp not being available at the right time during install (unless we bundled). This should no longer be the case. Also, for a time we recommended using `"preinstall": "npm install @mapbox/node-pre-gyp"` as an alternative method to avoid needing to bundle. But this did not behave predictably across all npm versions - see https://github.com/mapbox/node-pre-gyp/issues/260 for the details. So we do not recommend using `preinstall` to install `@mapbox/node-pre-gyp`. More history on this at https://github.com/strongloop/fsevents/issues/157#issuecomment-265545908.
|
||||||
|
|
||||||
|
##### The `binary` object has three required properties
|
||||||
|
|
||||||
|
###### module_name
|
||||||
|
|
||||||
|
The name of your native node module. This value must:
|
||||||
|
|
||||||
|
- Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world)
|
||||||
|
- Must be a valid C variable name (e.g. it cannot contain `-`)
|
||||||
|
- Should not include the `.node` extension.
|
||||||
|
|
||||||
|
###### module_path
|
||||||
|
|
||||||
|
The location your native module is placed after a build. This should be an empty directory without other Javascript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball.
|
||||||
|
|
||||||
|
Note: This property supports variables based on [Versioning](#versioning).
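
For instance, a versioned `module_path` (a sketch only; the variable names come from the [Versioning](#versioning) section and `your_module` is a placeholder) might look like:

```js
// Hypothetical package.json fragment: the install path varies per node ABI,
// platform, and architecture so binaries for different targets never collide.
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
  "host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```
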
###### host
|
||||||
|
|
||||||
|
A url to the remote location where you've published tarball binaries (must be `https` not `http`).
|
||||||
|
|
||||||
|
It is highly recommended that you use Amazon S3. The reasons are:
|
||||||
|
|
||||||
|
- Various node-pre-gyp commands like `publish` and `info` only work with an S3 host.
|
||||||
|
- S3 is a very solid hosting platform for distributing large files.
|
||||||
|
- We provide detailed documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp.
|
||||||
|
|
||||||
|
Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible.
|
||||||
|
|
||||||
|
It should also be mentioned that there is an optional and entirely separate npm module called [node-pre-gyp-github](https://github.com/bchr02/node-pre-gyp-github) which is intended to complement node-pre-gyp and be installed along with it. It provides the ability to store and publish your binaries within your repository's GitHub Releases if you would rather not use S3 directly. Installation and usage instructions can be found [here](https://github.com/bchr02/node-pre-gyp-github), but the basic premise is that instead of using the ```node-pre-gyp publish``` command you would use ```node-pre-gyp-github publish```.
|
||||||
|
|
||||||
|
##### The `binary` object other optional S3 properties
|
||||||
|
|
||||||
|
If you are not using a standard s3 path like `bucket_name.s3(.-)region.amazonaws.com`, you might get an error on `publish` because node-pre-gyp extracts the region and bucket from the `host` url. For example, you may have an on-premises s3-compatible storage server, or may have configured a specific dns redirecting to an s3 endpoint. In these cases, you can explicitly set the `region` and `bucket` properties to tell node-pre-gyp to use these values instead of guessing from the `host` property. The following values can be used in the `binary` section:
|
||||||
|
|
||||||
|
###### host
|
||||||
|
|
||||||
|
The url to the remote server root location (must be `https` not `http`).
|
||||||
|
|
||||||
|
###### bucket
|
||||||
|
|
||||||
|
The bucket name where your tarball binaries should be located.
|
||||||
|
|
||||||
|
###### region
|
||||||
|
|
||||||
|
Your S3 server region.
|
||||||
|
|
||||||
|
###### s3ForcePathStyle
|
||||||
|
|
||||||
|
Set `s3ForcePathStyle` to true if the endpoint url should not be prefixed with the bucket name. If false (default), the server endpoint would be constructed as `bucket_name.your_server.com`.
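
As a sketch (the endpoint, bucket, and region values below are placeholders, not defaults), a `binary` section for an s3-compatible server that is not hosted on `amazonaws.com` could combine these properties like:

```js
// Hypothetical package.json fragment for an on-premises, s3-compatible host.
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "host": "https://storage.example.internal",  // non-standard endpoint, so...
  "bucket": "native-binaries",                 // ...bucket is set explicitly
  "region": "us-east-1",                       // ...region is set explicitly
  "s3ForcePathStyle": true                     // endpoint is not prefixed with the bucket name
}
```
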
##### The `binary` object has optional properties
|
||||||
|
|
||||||
|
###### remote_path
|
||||||
|
|
||||||
|
It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`.
|
||||||
|
|
||||||
|
Note: This property supports variables based on [Versioning](#versioning).
|
||||||
|
|
||||||
|
###### package_name
|
||||||
|
|
||||||
|
It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`.
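
Putting that together (an illustrative sketch; `your_module` and the host url are placeholders), a versioned `remote_path` paired with a shortened `package_name` looks like:

```js
// Hypothetical package.json fragment: the version lives in remote_path, so
// package_name only needs to vary by ABI, platform, and architecture.
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "remote_path": "./{module_name}/v{version}",
  "package_name": "{node_abi}-{platform}-{arch}.tar.gz",
  "host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```
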
Avoiding the version of your module in the `package_name` and instead only embedding in a directory name can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: This property supports variables based on [Versioning](#versioning).
|
||||||
|
|
||||||
|
#### 2) Add a new target to binding.gyp
|
||||||
|
|
||||||
|
`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path).
|
||||||
|
|
||||||
|
A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`.
|
||||||
|
|
||||||
|
Add a target like this at the end of your `targets` list:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
"target_name": "action_after_build",
|
||||||
|
"type": "none",
|
||||||
|
"dependencies": [ "<(module_name)" ],
|
||||||
|
"copies": [
|
||||||
|
{
|
||||||
|
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
|
||||||
|
"destination": "<(module_path)"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp).
|
||||||
|
|
||||||
|
#### 3) Dynamically require your `.node`
|
||||||
|
|
||||||
|
Inside the main js file that requires your addon module you are likely currently doing:
|
||||||
|
|
||||||
|
```js
|
||||||
|
var binding = require('../build/Release/binding.node');
|
||||||
|
```
|
||||||
|
|
||||||
|
or:
|
||||||
|
|
||||||
|
```js
|
||||||
|
var bindings = require('./bindings')
|
||||||
|
```
|
||||||
|
|
||||||
|
Change those lines to:
|
||||||
|
|
||||||
|
```js
|
||||||
|
var binary = require('@mapbox/node-pre-gyp');
|
||||||
|
var path = require('path');
|
||||||
|
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
|
||||||
|
var binding = require(binding_path);
|
||||||
|
```
|
||||||
|
|
||||||
|
For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4)
|
||||||
|
|
||||||
|
#### 4) Build and package your app
|
||||||
|
|
||||||
|
Now build your module from source:
|
||||||
|
|
||||||
|
npm install --build-from-source
|
||||||
|
|
||||||
|
The `--build-from-source` flag tells `node-pre-gyp` to not look for a remote package and instead dispatch to node-gyp to build.
|
||||||
|
|
||||||
|
`node-pre-gyp` should now also be installed as a local dependency, so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.
|
||||||
|
|
||||||
|
#### 5) Test
|
||||||
|
|
||||||
|
Now `npm test` should work just as it did before.
|
||||||
|
|
||||||
|
#### 6) Publish the tarball
|
||||||
|
|
||||||
|
Then package your app:
|
||||||
|
|
||||||
|
./node_modules/.bin/node-pre-gyp package
|
||||||
|
|
||||||
|
Once packaged, now you can publish:
|
||||||
|
|
||||||
|
./node_modules/.bin/node-pre-gyp publish
|
||||||
|
|
||||||
|
Currently the `publish` command pushes your binary to S3. This requires:
|
||||||
|
|
||||||
|
- You have installed `aws-sdk` with `npm install aws-sdk`
|
||||||
|
- You have created a bucket already.
|
||||||
|
- The `host` points to an S3 http or https endpoint.
|
||||||
|
- You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details).
|
||||||
|
|
||||||
|
You can also host your binaries elsewhere. To do this requires:
|
||||||
|
|
||||||
|
- You manually publish the binary created by the `package` command to an `https` endpoint
|
||||||
|
- Ensure that the `host` value points to your custom `https` endpoint.
|
||||||
|
|
||||||
|
#### 7) Automate builds
|
||||||
|
|
||||||
|
Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated.
|
||||||
|
|
||||||
|
- See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows.
|
||||||
|
- See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux.
|
||||||
|
|
||||||
|
#### 8) You're done!
|
||||||
|
|
||||||
|
Now publish your module to the npm registry. Users will now be able to install your module from a binary.
|
||||||
|
|
||||||
|
What will happen is this:
|
||||||
|
|
||||||
|
1. `npm install <your package>` will pull from the npm registry
|
||||||
|
2. npm will run the `install` script which will call out to `node-pre-gyp`
|
||||||
|
3. `node-pre-gyp` will fetch the binary `.node` module and unpack in the right place
|
||||||
|
4. Assuming that all worked, you are done
|
||||||
|
|
||||||
|
If a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module.
|
||||||
|
|
||||||
|
#### 9) One more option
|
||||||
|
|
||||||
|
It may be that you want to work with two s3 buckets, one for staging and one for production; this arrangement makes it less likely to accidentally overwrite a production binary. It also allows the production environment to have more restrictive permissions than staging while still enabling publishing when developing and testing.
|
||||||
|
|
||||||
|
The binary.host property can be set at execution time. In order to do so, all of the following conditions must be true:
|
||||||
|
|
||||||
|
- binary.host is falsey or not present
|
||||||
|
- binary.staging_host is not empty
|
||||||
|
- binary.production_host is not empty
|
||||||
|
|
||||||
|
If any of these checks fail then the operation will not perform execution time determination of the s3 target.
|
||||||
|
|
||||||
|
If the command being executed is either "publish" or "unpublish" then the default is set to `binary.staging_host`. In all other cases the default is `binary.production_host`.
|
||||||
|
|
||||||
|
The command-line options `--s3_host=staging` or `--s3_host=production` override the default. If `s3_host` is present and is not `staging` or `production`, an exception is thrown.
|
||||||
|
|
||||||
|
This allows installing from staging by specifying `--s3_host=staging`. And it requires specifying `--s3_host=production` in order to publish to, or unpublish from, production, making accidental errors less likely.
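
A sketch of what this can look like in `package.json` (the bucket urls are placeholders):

```js
// binary.host is omitted so the s3 target is chosen at execution time:
// staging for "publish"/"unpublish", production otherwise, unless an explicit
// --s3_host=staging or --s3_host=production flag is passed.
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "staging_host": "https://your_module_staging.s3-us-west-1.amazonaws.com",
  "production_host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```
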
## Node-API Considerations
|
||||||
|
|
||||||
|
[Node-API](https://nodejs.org/api/n-api.html#n_api_node_api), which was previously known as N-API, is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. Node-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future.
|
||||||
|
|
||||||
|
Using `node-pre-gyp` with Node-API projects requires a handful of additional configuration values and imposes some additional requirements.
|
||||||
|
|
||||||
|
The most significant difference is that a Node-API module can be coded to target multiple Node-API versions. Therefore, a Node-API module must declare in its `package.json` file which Node-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in a way that avoids naming conflicts.
|
||||||
|
|
||||||
|
### The `napi_versions` array property
|
||||||
|
|
||||||
|
A Node-API module must declare in its `package.json` file the Node-API versions the module is intended to support. This is accomplished by including a `napi_versions` array property in the `binary` object. For example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
"binary": {
|
||||||
|
"module_name": "your_module",
|
||||||
|
"module_path": "your_module_path",
|
||||||
|
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
||||||
|
"napi_versions": [1,3]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is a Node-API module build.
|
||||||
|
|
||||||
|
When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the Node-API versions in the array. In the example above, two operations are initiated, one for Node-API version 1 and a second for Node-API version 3. How this version number is communicated is described next.
|
||||||
|
|
||||||
|
### The `napi_build_version` value
|
||||||
|
|
||||||
|
For each of the Node-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately.
|
||||||
|
|
||||||
|
This value is of importance in two areas:
|
||||||
|
|
||||||
|
1. The C/C++ code which needs to know against which Node-API version it should compile.
|
||||||
|
2. `node-pre-gyp` itself which must assign appropriate path and file names to avoid collisions.
|
||||||
|
|
||||||
|
### Defining `NAPI_VERSION` for the C/C++ code
|
||||||
|
|
||||||
|
The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file:
|
||||||
|
|
||||||
|
```
|
||||||
|
"defines": [
|
||||||
|
"NAPI_VERSION=<(napi_build_version)",
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build.
|
||||||
|
|
||||||
|
> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it is used by the Node-API C/C++ headers to configure the specific Node-API versions being requested.
|
||||||
|
|
||||||
|
### Path and file naming requirements in `package.json`
|
||||||
|
|
||||||
|
Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements.
|
||||||
|
|
||||||
|
Specifically, when performing Node-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.)
|
||||||
|
|
||||||
|
Here's an example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
"binary": {
|
||||||
|
"module_name": "your_module",
|
||||||
|
"module_path": "./lib/binding/napi-v{napi_build_version}",
|
||||||
|
"remote_path": "./{module_name}/v{version}/{configuration}/",
|
||||||
|
"package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz",
|
||||||
|
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
||||||
|
"napi_versions": [1,3]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Supporting both Node-API and NAN builds
|
||||||
|
|
||||||
|
You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support Node-API, as you add Node-API support for later Node versions. This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property.
|
||||||
|
|
||||||
|
Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable Node-API binaries supported by the current Node instance. If the current Node instance does not support Node-API, `node-pre-gyp` will request a traditional, non-Node-API build.
|
||||||
|
|
||||||
|
The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For Node-API builds, the string contains the Node-API version and has values like `napi-v3`. For traditional, non-Node-API builds, the string contains the ABI version with values like `node-v46`.
|
||||||
|
|
||||||
|
Here's how the `binary` configuration above might be changed to support both Node-API and NAN builds:
|
||||||
|
|
||||||
|
```js
|
||||||
|
"binary": {
|
||||||
|
"module_name": "your_module",
|
||||||
|
"module_path": "./lib/binding/{node_napi_label}",
|
||||||
|
"remote_path": "./{module_name}/v{version}/{configuration}/",
|
||||||
|
"package_name": "{platform}-{arch}-{node_napi_label}.tar.gz",
|
||||||
|
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
||||||
|
"napi_versions": [1,3]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The C/C++ symbol `NAPI_VERSION` can be used to distinguish Node-API and non-Node-API builds. The value of `NAPI_VERSION` is set to the integer Node-API version for Node-API builds and is set to `0` for non-Node-API builds.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
```C
|
||||||
|
#if NAPI_VERSION
|
||||||
|
// Node-API code goes here
|
||||||
|
#else
|
||||||
|
// NAN code goes here
|
||||||
|
#endif
|
||||||
|
```
|
||||||
|
|
||||||
|
### Two additional configuration values
|
||||||
|
|
||||||
|
The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist, but have been replaced by the `node_napi_label` configuration value described above.
|
||||||
|
|
||||||
|
1. `napi_version` If Node-API is supported by the currently executing Node instance, this value is the Node-API version number supported by Node. If Node-API is not supported, this value is an empty string.
|
||||||
|
|
||||||
|
2. `node_abi_napi` If the value returned for `napi_version` is non empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`.
|
||||||
|
|
||||||
|
These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substitution in the `binary` properties of the `package.json` file.
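
For example (a sketch only; `node_napi_label` remains the recommended approach), these older values could be substituted into `package_name` like:

```js
// {node_abi_napi} resolves to "napi" when Node-API is supported, otherwise to
// the regular {node_abi} value such as "node-v46".
"binary": {
  "package_name": "{module_name}-v{version}-{node_abi_napi}-{platform}-{arch}.tar.gz"
}
```
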
## S3 Hosting
|
||||||
|
|
||||||
|
You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [Travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu, and with [Appveyor](http://appveyor.com) to automate builds for Windows. Here is an approach to do this:
|
||||||
|
|
||||||
|
First, get setup locally and test the workflow:
|
||||||
|
|
||||||
|
#### 1) Create an S3 bucket
|
||||||
|
|
||||||
|
And have your **key** and **secret key** ready for writing to the bucket.
|
||||||
|
|
||||||
|
It is recommended to create an IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by: 1) adding a new user, 2) choosing `Attach User Policy`, 3) using the `Policy Generator`, 4) selecting `Amazon S3` for the service, 5) adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `HeadBucket`, `PutObject`, `PutObjectAcl`, 6) adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally 7) clicking `Add Statement` and saving the policy. It should generate a policy like:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
"Version": "2012-10-17",
|
||||||
|
"Statement": [
|
||||||
|
{
|
||||||
|
"Sid": "objects",
|
||||||
|
"Effect": "Allow",
|
||||||
|
"Action": [
|
||||||
|
"s3:PutObject",
|
||||||
|
"s3:GetObjectAcl",
|
||||||
|
"s3:GetObject",
|
||||||
|
"s3:DeleteObject",
|
||||||
|
"s3:PutObjectAcl"
|
||||||
|
],
|
||||||
|
"Resource": "arn:aws:s3:::your-bucket-name/*"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Sid": "bucket",
|
||||||
|
"Effect": "Allow",
|
||||||
|
"Action": "s3:ListBucket",
|
||||||
|
"Resource": "arn:aws:s3:::your-bucket-name"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Sid": "buckets",
|
||||||
|
"Effect": "Allow",
|
||||||
|
"Action": "s3:HeadBucket",
|
||||||
|
"Resource": "*"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2) Install node-pre-gyp
|
||||||
|
|
||||||
|
Either install it globally:
|
||||||
|
|
||||||
|
npm install node-pre-gyp -g
|
||||||
|
|
||||||
|
Or put the local version on your PATH
|
||||||
|
|
||||||
|
export PATH=`pwd`/node_modules/.bin/:$PATH
|
||||||
|
|
||||||
|
#### 3) Configure AWS credentials
|
||||||
|
|
||||||
|
It is recommended to configure the AWS JS SDK v2 used internally by `node-pre-gyp` by setting these environment variables:
|
||||||
|
|
||||||
|
- AWS_ACCESS_KEY_ID
|
||||||
|
- AWS_SECRET_ACCESS_KEY
|
||||||
|
|
||||||
|
You can also use the `Shared Config File` mentioned [in the AWS JS SDK v2 docs](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/configuring-the-jssdk.html).
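
As a minimal sanity check before publishing (a hypothetical helper script, not part of `node-pre-gyp` itself), you can verify the variables are visible to the process:

```js
// check-aws-env.js — fails fast if the credentials are not in the environment.
const required = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY'];
const missing = required.filter((name) => !process.env[name]);
if (missing.length > 0) {
  console.error('Missing AWS credentials for node-pre-gyp publish:', missing.join(', '));
  process.exit(1);
}
console.log('AWS credentials found in the environment.');
```
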
#### 4) Package and publish your build
|
||||||
|
|
||||||
|
Install the `aws-sdk`:
|
||||||
|
|
||||||
|
npm install aws-sdk
|
||||||
|
|
||||||
|
Then publish:
|
||||||
|
|
||||||
|
node-pre-gyp package publish
|
||||||
|
|
||||||
|
Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config.
|
||||||
|
|
||||||
|
## Appveyor Automation
|
||||||
|
|
||||||
|
[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports:
|
||||||
|
|
||||||
|
- Windows Visual Studio 2013 and related compilers
|
||||||
|
- Both 64 bit (x64) and 32 bit (x86) build configurations
|
||||||
|
- Multiple Node.js versions
|
||||||
|
|
||||||
|
For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml).
|
||||||
|
|
||||||
|
Below is a guide to getting set up:
|
||||||
|
|
||||||
|
#### 1) Create a free Appveyor account
|
||||||
|
|
||||||
|
Go to https://ci.appveyor.com/signup/free and sign in with your GitHub account.
|
||||||
|
|
||||||
|
#### 2) Create a new project
|
||||||
|
|
||||||
|
Go to https://ci.appveyor.com/projects/new and select the GitHub repo for your module
|
||||||
|
|
||||||
|
#### 3) Add appveyor.yml and push it
|
||||||
|
|
||||||
|
Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your GitHub repo and pushed it AppVeyor should automatically start building your project.
|
||||||
|
|
||||||
|
#### 4) Create secure variables
|
||||||
|
|
||||||
|
Encrypt your S3 AWS keys by going to <https://ci.appveyor.com/tools/encrypt> and hitting the `encrypt` button.
|
||||||
|
|
||||||
|
Then paste the result into your `appveyor.yml`
|
||||||
|
|
||||||
|
```yml
|
||||||
|
environment:
|
||||||
|
AWS_ACCESS_KEY_ID:
|
||||||
|
secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA=
|
||||||
|
AWS_SECRET_ACCESS_KEY:
|
||||||
|
secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL
|
||||||
|
```
|
||||||
|
|
||||||
|
NOTE: keys are per account but not per repo (this is different from Travis, where keys are per repo but not related to the account used to encrypt them).
|
||||||
|
|
||||||
|
#### 5) Hook up publishing
|
||||||
|
|
||||||
|
Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`.
|
||||||
|
|
||||||
|
#### 6) Publish when you want
|
||||||
|
|
||||||
|
You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:
|
||||||
|
|
||||||
|
SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE%
|
||||||
|
if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish
|
||||||
|
|
||||||
|
If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`:
|
||||||
|
|
||||||
|
ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish }
|
||||||
|
|
||||||
|
Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package.
|
||||||
|
|
||||||
|
## Travis Automation
|
||||||
|
|
||||||
|
[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both:
|
||||||
|
|
||||||
|
- Ubuntu Precise and OS X (64 bit)
|
||||||
|
- Multiple Node.js versions
|
||||||
|
|
||||||
|
For an example of doing this see [node-add-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml).
|
||||||
|
|
||||||
|
Note: if you need 32 bit binaries, this can be done from a 64 bit Travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74).
|
||||||
|
|
||||||
|
Below is a guide to getting set up:
|
||||||
|
|
||||||
|
#### 1) Install the Travis gem
|
||||||
|
|
||||||
|
gem install travis
|
||||||
|
|
||||||
|
#### 2) Create secure variables
|
||||||
|
|
||||||
|
Make sure you run this command from within the directory of your module.
|
||||||
|
|
||||||
|
Use `travis-encrypt` like:
|
||||||
|
|
||||||
|
travis encrypt AWS_ACCESS_KEY_ID=${node_pre_gyp_accessKeyId}
|
||||||
|
travis encrypt AWS_SECRET_ACCESS_KEY=${node_pre_gyp_secretAccessKey}
|
||||||
|
|
||||||
|
Then put those values in your `.travis.yml` like:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
env:
|
||||||
|
global:
|
||||||
|
- secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M=
|
||||||
|
- secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI=
|
||||||
|
```
|
||||||
|
|
||||||
|
More details on Travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/.
|
||||||
|
|
||||||
|
#### 3) Hook up publishing
|
||||||
|
|
||||||
|
Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`.
|
||||||
|
|
||||||
|
##### OS X publishing

If you want binaries for OS X in addition to Linux you can enable [multi-os for Travis](http://docs.travis-ci.com/user/multi-os/#Setting-.travis.yml).

Use a configuration like:

```yml
language: cpp

os:
- linux
- osx

env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
- source ~/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
```

See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix.

Also create platform specific sections for any deps that need install. For example if you need libpng:

```yml
- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi;
- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi;
```

For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml).

##### Travis OS X Gotchas

First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. To do so you will need to:

```sh
export PATH=$(pwd)/node_modules/.bin:${PATH}
```

Second, the OS X machines do not support using a matrix for installing different Node.js versions. So you need to bootstrap the installation of Node.js in a cross platform way, by doing:

```yml
env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
- source ~/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
```

This easily recreates the previous behavior of a `node_js` matrix like:

```yml
node_js:
  - "4"
  - "6"
```

#### 4) Publish when you want

You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:

    COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n')
    if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi;

Then you can trigger new binaries to be built like:

    git commit -a -m "[publish binary]"

Or, if you don't have any changes to make simply run:

    git commit --allow-empty -m "[publish binary]"

WARNING: if you are working in a pull request and publishing binaries from there, you will want to avoid double publishing when Travis CI builds both the `push` and the `pr` job. You only want to run the publish on the `push` build. See https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/is_pr_merge.sh, which is called from https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/publish.sh, for an example of how to do this.

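A minimal sketch of such a guard, relying on Travis CI's standard `TRAVIS_PULL_REQUEST` environment variable rather than the linked OSRM scripts (the surrounding script is illustrative only):

```sh
# TRAVIS_PULL_REQUEST is "false" on push builds and the PR number on pr builds
if [[ "${TRAVIS_PULL_REQUEST}" == "false" ]]; then
  ./node_modules/.bin/node-pre-gyp package publish
else
  echo "pull request build: skipping publish"
fi
```
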
Remember that this publishing is not the same as `npm publish`: we are only talking about the binary module here, not your entire npm package. To automate publishing your entire package to npm on Travis, see http://about.travis-ci.org/docs/user/deployment/npm/.

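For reference, that npm deployment is typically configured with Travis's `npm` deploy provider; a minimal sketch (the email and encrypted token below are placeholders, not values from this project):

```yaml
deploy:
  provider: npm
  email: you@example.com            # placeholder
  api_key:
    secure: "<token encrypted with travis encrypt>"  # placeholder
  on:
    tags: true
```
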
# Versioning

The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed (see the sketch after this list for an illustrative `binary` configuration).

- `node_abi`: The node C++ `ABI` number. This value is available in Javascript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override.
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
- `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override.
- `configuration` - Either 'Release' or 'Debug' depending on if `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`.
- `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`.
- `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`.

The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>

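As an illustration of how these placeholders combine, a hypothetical `binary` section of a `package.json` might look like the following (the module name, paths, and bucket URL are made up, not this project's configuration):

```js
// Hypothetical "binary" section of a package.json using the placeholders above
{
  "binary": {
    "module_name": "my_native_module",
    "module_path": "./lib/binding/{configuration}/{node_abi}-{platform}-{arch}/",
    "remote_path": "./{module_name}/v{version}/{configuration}/",
    "package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz",
    "host": "https://my-example-bucket.s3-us-west-1.amazonaws.com"
  }
}
```
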
# Download binary files from a mirror

S3 is not reliably reachable from China, for well-known reasons.

Using the `npm` config argument `--{module_name}_binary_host_mirror`, you can download binary files through a mirror; any `-` in `module_name` is replaced with `_`.

e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`.

```bash
$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
```

e.g.: Install [canvas-prebuilt](https://www.npmjs.com/package/canvas-prebuilt) from `npm`.

```bash
$ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/
```
4
node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp
generated
vendored
Executable file
@@ -0,0 +1,4 @@
#!/usr/bin/env node
'use strict';

require('../lib/main');
2
node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd
generated
vendored
Normal file
@@ -0,0 +1,2 @@
@echo off
node "%~dp0\node-pre-gyp" %*
10
node_modules/@mapbox/node-pre-gyp/contributing.md
generated
vendored
Normal file
@@ -0,0 +1,10 @@
# Contributing

### Releasing a new version:

- Ensure tests are passing on travis and appveyor
- Run `node scripts/abi_crosswalk.js` and commit any changes
- Update the changelog
- Tag a new release like: `git tag -a v0.6.34 -m "tagging v0.6.34" && git push --tags`
- Run `npm publish`
51
node_modules/@mapbox/node-pre-gyp/lib/build.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
'use strict';

module.exports = exports = build;

exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';

const napi = require('./util/napi.js');
const compile = require('./util/compile.js');
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
const configure = require('./configure.js');

function do_build(gyp, argv, callback) {
  handle_gyp_opts(gyp, argv, (err, result) => {
    let final_args = ['build'].concat(result.gyp).concat(result.pre);
    if (result.unparsed.length > 0) {
      final_args = final_args.
        concat(['--']).
        concat(result.unparsed);
    }
    if (!err && result.opts.napi_build_version) {
      napi.swap_build_dir_in(result.opts.napi_build_version);
    }
    compile.run_gyp(final_args, result.opts, (err2) => {
      if (result.opts.napi_build_version) {
        napi.swap_build_dir_out(result.opts.napi_build_version);
      }
      return callback(err2);
    });
  });
}

function build(gyp, argv, callback) {

  // Form up commands to pass to node-gyp:
  // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not
  // trigger a clean and therefore do not pay the penalty of a full recompile
  if (argv.length && (argv.indexOf('rebuild') > -1)) {
    argv.shift(); // remove `rebuild`
    // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means
    // "clean + configure + build" and triggers a full recompile
    compile.run_gyp(['clean'], {}, (err3) => {
      if (err3) return callback(err3);
      configure(gyp, argv, (err4) => {
        if (err4) return callback(err4);
        return do_build(gyp, argv, callback);
      });
    });
  } else {
    return do_build(gyp, argv, callback);
  }
}
31
node_modules/@mapbox/node-pre-gyp/lib/clean.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
'use strict';

module.exports = exports = clean;

exports.usage = 'Removes the entire folder containing the compiled .node module';

const rm = require('rimraf');
const exists = require('fs').exists || require('path').exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const path = require('path');

function clean(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  const to_delete = opts.module_path;
  if (!to_delete) {
    return callback(new Error('module_path is empty, refusing to delete'));
  } else if (path.normalize(to_delete) === path.normalize(process.cwd())) {
    return callback(new Error('module_path is not set, refusing to delete'));
  } else {
    exists(to_delete, (found) => {
      if (found) {
        if (!gyp.opts.silent_clean) console.log('[' + package_json.name + '] Removing "%s"', to_delete);
        return rm(to_delete, callback);
      }
      return callback();
    });
  }
}
52
node_modules/@mapbox/node-pre-gyp/lib/configure.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = configure;
|
||||||
|
|
||||||
|
exports.usage = 'Attempts to configure node-gyp or nw-gyp build';
|
||||||
|
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
const compile = require('./util/compile.js');
|
||||||
|
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
|
||||||
|
|
||||||
|
function configure(gyp, argv, callback) {
|
||||||
|
handle_gyp_opts(gyp, argv, (err, result) => {
|
||||||
|
let final_args = result.gyp.concat(result.pre);
|
||||||
|
// pull select node-gyp configure options out of the npm environ
|
||||||
|
const known_gyp_args = ['dist-url', 'python', 'nodedir', 'msvs_version'];
|
||||||
|
known_gyp_args.forEach((key) => {
|
||||||
|
const val = gyp.opts[key] || gyp.opts[key.replace('-', '_')];
|
||||||
|
if (val) {
|
||||||
|
final_args.push('--' + key + '=' + val);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// --ensure=false tells node-gyp to re-install node development headers
|
||||||
|
// but it is only respected by node-gyp install, so we have to call install
|
||||||
|
// as a separate step if the user passes it
|
||||||
|
if (gyp.opts.ensure === false) {
|
||||||
|
const install_args = final_args.concat(['install', '--ensure=false']);
|
||||||
|
compile.run_gyp(install_args, result.opts, (err2) => {
|
||||||
|
if (err2) return callback(err2);
|
||||||
|
if (result.unparsed.length > 0) {
|
||||||
|
final_args = final_args.
|
||||||
|
concat(['--']).
|
||||||
|
concat(result.unparsed);
|
||||||
|
}
|
||||||
|
compile.run_gyp(['configure'].concat(final_args), result.opts, (err3) => {
|
||||||
|
return callback(err3);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
if (result.unparsed.length > 0) {
|
||||||
|
final_args = final_args.
|
||||||
|
concat(['--']).
|
||||||
|
concat(result.unparsed);
|
||||||
|
}
|
||||||
|
compile.run_gyp(['configure'].concat(final_args), result.opts, (err4) => {
|
||||||
|
if (!err4 && result.opts.napi_build_version) {
|
||||||
|
napi.swap_build_dir_out(result.opts.napi_build_version);
|
||||||
|
}
|
||||||
|
return callback(err4);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
38
node_modules/@mapbox/node-pre-gyp/lib/info.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = info;
|
||||||
|
|
||||||
|
exports.usage = 'Lists all published binaries (requires aws-sdk)';
|
||||||
|
|
||||||
|
const log = require('npmlog');
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const s3_setup = require('./util/s3_setup.js');
|
||||||
|
|
||||||
|
function info(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts);
|
||||||
|
const config = {};
|
||||||
|
s3_setup.detect(opts, config);
|
||||||
|
const s3 = s3_setup.get_s3(config);
|
||||||
|
const s3_opts = {
|
||||||
|
Bucket: config.bucket,
|
||||||
|
Prefix: config.prefix
|
||||||
|
};
|
||||||
|
s3.listObjects(s3_opts, (err, meta) => {
|
||||||
|
if (err && err.code === 'NotFound') {
|
||||||
|
return callback(new Error('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix));
|
||||||
|
} else if (err) {
|
||||||
|
return callback(err);
|
||||||
|
} else {
|
||||||
|
log.verbose(JSON.stringify(meta, null, 1));
|
||||||
|
if (meta && meta.Contents) {
|
||||||
|
meta.Contents.forEach((obj) => {
|
||||||
|
console.log(obj.Key);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
console.error('[' + package_json.name + '] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix);
|
||||||
|
}
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
235
node_modules/@mapbox/node-pre-gyp/lib/install.js
generated
vendored
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = install;
|
||||||
|
|
||||||
|
exports.usage = 'Attempts to install pre-built binary for module';
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('npmlog');
|
||||||
|
const existsAsync = fs.exists || path.exists;
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
const makeDir = require('make-dir');
|
||||||
|
// for fetching binaries
|
||||||
|
const fetch = require('node-fetch');
|
||||||
|
const tar = require('tar');
|
||||||
|
|
||||||
|
let npgVersion = 'unknown';
|
||||||
|
try {
|
||||||
|
// Read own package.json to get the current node-pre-gyp version.
|
||||||
|
const ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8');
|
||||||
|
npgVersion = JSON.parse(ownPackageJSON).version;
|
||||||
|
} catch (e) {
|
||||||
|
// do nothing
|
||||||
|
}
|
||||||
|
|
||||||
|
function place_binary(uri, targetDir, opts, callback) {
|
||||||
|
log.http('GET', uri);
|
||||||
|
|
||||||
|
// Try getting version info from the currently running npm.
|
||||||
|
const envVersionInfo = process.env.npm_config_user_agent ||
|
||||||
|
'node ' + process.version;
|
||||||
|
|
||||||
|
const sanitized = uri.replace('+', '%2B');
|
||||||
|
const requestOpts = {
|
||||||
|
uri: sanitized,
|
||||||
|
headers: {
|
||||||
|
'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')'
|
||||||
|
},
|
||||||
|
follow_max: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
if (opts.cafile) {
|
||||||
|
try {
|
||||||
|
requestOpts.ca = fs.readFileSync(opts.cafile);
|
||||||
|
} catch (e) {
|
||||||
|
return callback(e);
|
||||||
|
}
|
||||||
|
} else if (opts.ca) {
|
||||||
|
requestOpts.ca = opts.ca;
|
||||||
|
}
|
||||||
|
|
||||||
|
const proxyUrl = opts.proxy ||
|
||||||
|
process.env.http_proxy ||
|
||||||
|
process.env.HTTP_PROXY ||
|
||||||
|
process.env.npm_config_proxy;
|
||||||
|
let agent;
|
||||||
|
if (proxyUrl) {
|
||||||
|
const ProxyAgent = require('https-proxy-agent');
|
||||||
|
agent = new ProxyAgent(proxyUrl);
|
||||||
|
log.http('download', 'proxy agent configured using: "%s"', proxyUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
fetch(sanitized, { agent })
|
||||||
|
.then((res) => {
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`);
|
||||||
|
}
|
||||||
|
const dataStream = res.body;
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let extractions = 0;
|
||||||
|
const countExtractions = (entry) => {
|
||||||
|
extractions += 1;
|
||||||
|
log.info('install', 'unpacking %s', entry.path);
|
||||||
|
};
|
||||||
|
|
||||||
|
dataStream.pipe(extract(targetDir, countExtractions))
|
||||||
|
.on('error', (e) => {
|
||||||
|
reject(e);
|
||||||
|
});
|
||||||
|
dataStream.on('end', () => {
|
||||||
|
resolve(`extracted file count: ${extractions}`);
|
||||||
|
});
|
||||||
|
dataStream.on('error', (e) => {
|
||||||
|
reject(e);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.then((text) => {
|
||||||
|
log.info(text);
|
||||||
|
callback();
|
||||||
|
})
|
||||||
|
.catch((e) => {
|
||||||
|
log.error(`install ${e.message}`);
|
||||||
|
callback(e);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function extract(to, onentry) {
|
||||||
|
return tar.extract({
|
||||||
|
cwd: to,
|
||||||
|
strip: 1,
|
||||||
|
onentry
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function extract_from_local(from, targetDir, callback) {
|
||||||
|
if (!fs.existsSync(from)) {
|
||||||
|
return callback(new Error('Cannot find file ' + from));
|
||||||
|
}
|
||||||
|
log.info('Found local file to extract from ' + from);
|
||||||
|
|
||||||
|
// extract helpers
|
||||||
|
let extractCount = 0;
|
||||||
|
function countExtractions(entry) {
|
||||||
|
extractCount += 1;
|
||||||
|
log.info('install', 'unpacking ' + entry.path);
|
||||||
|
}
|
||||||
|
function afterExtract(err) {
|
||||||
|
if (err) return callback(err);
|
||||||
|
if (extractCount === 0) {
|
||||||
|
return callback(new Error('There was a fatal problem while extracting the tarball'));
|
||||||
|
}
|
||||||
|
log.info('tarball', 'done parsing tarball');
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.createReadStream(from).pipe(extract(targetDir, countExtractions))
|
||||||
|
.on('close', afterExtract)
|
||||||
|
.on('error', afterExtract);
|
||||||
|
}
|
||||||
|
|
||||||
|
function do_build(gyp, argv, callback) {
|
||||||
|
const args = ['rebuild'].concat(argv);
|
||||||
|
gyp.todo.push({ name: 'build', args: args });
|
||||||
|
process.nextTick(callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
function print_fallback_error(err, opts, package_json) {
|
||||||
|
const fallback_message = ' (falling back to source compile with node-gyp)';
|
||||||
|
let full_message = '';
|
||||||
|
if (err.statusCode !== undefined) {
|
||||||
|
// If we got a network response but failed to download
|
||||||
|
// it means remote binaries are not available, so let's try to help
|
||||||
|
// the user/developer with the info to debug why
|
||||||
|
full_message = 'Pre-built binaries not found for ' + package_json.name + '@' + package_json.version;
|
||||||
|
full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
|
||||||
|
full_message += fallback_message;
|
||||||
|
log.warn('Tried to download(' + err.statusCode + '): ' + opts.hosted_tarball);
|
||||||
|
log.warn(full_message);
|
||||||
|
log.http(err.message);
|
||||||
|
} else {
|
||||||
|
// If we do not have a statusCode that means an unexpected error
|
||||||
|
// happened and prevented an http response, so we output the exact error
|
||||||
|
full_message = 'Pre-built binaries not installable for ' + package_json.name + '@' + package_json.version;
|
||||||
|
full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
|
||||||
|
full_message += fallback_message;
|
||||||
|
log.warn(full_message);
|
||||||
|
log.warn('Hit error ' + err.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// install
|
||||||
|
//
|
||||||
|
function install(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
|
||||||
|
const update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;
|
||||||
|
const should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true');
|
||||||
|
if (should_do_source_build) {
|
||||||
|
log.info('build', 'requesting source compile');
|
||||||
|
return do_build(gyp, argv, callback);
|
||||||
|
} else {
|
||||||
|
const fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
|
||||||
|
let should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true');
|
||||||
|
// but allow override from npm
|
||||||
|
if (process.env.npm_config_argv) {
|
||||||
|
const cooked = JSON.parse(process.env.npm_config_argv).cooked;
|
||||||
|
const match = cooked.indexOf('--fallback-to-build');
|
||||||
|
if (match > -1 && cooked.length > match && cooked[match + 1] === 'false') {
|
||||||
|
should_do_fallback_build = false;
|
||||||
|
log.info('install', 'Build fallback disabled via npm flag: --fallback-to-build=false');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let opts;
|
||||||
|
try {
|
||||||
|
opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
} catch (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
opts.ca = gyp.opts.ca;
|
||||||
|
opts.cafile = gyp.opts.cafile;
|
||||||
|
|
||||||
|
const from = opts.hosted_tarball;
|
||||||
|
const to = opts.module_path;
|
||||||
|
const binary_module = path.join(to, opts.module_name + '.node');
|
||||||
|
existsAsync(binary_module, (found) => {
|
||||||
|
if (!update_binary) {
|
||||||
|
if (found) {
|
||||||
|
console.log('[' + package_json.name + '] Success: "' + binary_module + '" already installed');
|
||||||
|
console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
log.info('check', 'checked for "' + binary_module + '" (not found)');
|
||||||
|
}
|
||||||
|
|
||||||
|
makeDir(to).then(() => {
|
||||||
|
const fileName = from.startsWith('file://') && from.slice('file://'.length);
|
||||||
|
if (fileName) {
|
||||||
|
extract_from_local(fileName, to, after_place);
|
||||||
|
} else {
|
||||||
|
place_binary(from, to, opts, after_place);
|
||||||
|
}
|
||||||
|
}).catch((err) => {
|
||||||
|
after_place(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
function after_place(err) {
|
||||||
|
if (err && should_do_fallback_build) {
|
||||||
|
print_fallback_error(err, opts, package_json);
|
||||||
|
return do_build(gyp, argv, callback);
|
||||||
|
} else if (err) {
|
||||||
|
return callback(err);
|
||||||
|
} else {
|
||||||
|
console.log('[' + package_json.name + '] Success: "' + binary_module + '" is installed via remote');
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
125
node_modules/@mapbox/node-pre-gyp/lib/main.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the title.
|
||||||
|
*/
|
||||||
|
|
||||||
|
process.title = 'node-pre-gyp';
|
||||||
|
|
||||||
|
const node_pre_gyp = require('../');
|
||||||
|
const log = require('npmlog');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process and execute the selected commands.
|
||||||
|
*/
|
||||||
|
|
||||||
|
const prog = new node_pre_gyp.Run({ argv: process.argv });
|
||||||
|
let completed = false;
|
||||||
|
|
||||||
|
if (prog.todo.length === 0) {
|
||||||
|
if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
|
||||||
|
console.log('v%s', prog.version);
|
||||||
|
process.exit(0);
|
||||||
|
} else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) {
|
||||||
|
console.log('%s', prog.usage());
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
console.log('%s', prog.usage());
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// if --no-color is passed
|
||||||
|
if (prog.opts && Object.hasOwnProperty.call(prog, 'color') && !prog.opts.color) {
|
||||||
|
log.disableColor();
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info('it worked if it ends with', 'ok');
|
||||||
|
log.verbose('cli', process.argv);
|
||||||
|
log.info('using', process.title + '@%s', prog.version);
|
||||||
|
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Change dir if -C/--directory was passed.
|
||||||
|
*/
|
||||||
|
|
||||||
|
const dir = prog.opts.directory;
|
||||||
|
if (dir) {
|
||||||
|
const fs = require('fs');
|
||||||
|
try {
|
||||||
|
const stat = fs.statSync(dir);
|
||||||
|
if (stat.isDirectory()) {
|
||||||
|
log.info('chdir', dir);
|
||||||
|
process.chdir(dir);
|
||||||
|
} else {
|
||||||
|
log.warn('chdir', dir + ' is not a directory');
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
if (e.code === 'ENOENT') {
|
||||||
|
log.warn('chdir', dir + ' is not a directory');
|
||||||
|
} else {
|
||||||
|
log.warn('chdir', 'error during chdir() "%s"', e.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function run() {
|
||||||
|
const command = prog.todo.shift();
|
||||||
|
if (!command) {
|
||||||
|
// done!
|
||||||
|
completed = true;
|
||||||
|
log.info('ok');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// set binary.host when appropriate. host determines the s3 target bucket.
|
||||||
|
const target = prog.setBinaryHostProperty(command.name);
|
||||||
|
if (target && ['install', 'publish', 'unpublish', 'info'].indexOf(command.name) >= 0) {
|
||||||
|
log.info('using binary.host: ' + prog.package_json.binary.host);
|
||||||
|
}
|
||||||
|
|
||||||
|
prog.commands[command.name](command.args, function(err) {
|
||||||
|
if (err) {
|
||||||
|
log.error(command.name + ' error');
|
||||||
|
log.error('stack', err.stack);
|
||||||
|
errorMessage();
|
||||||
|
log.error('not ok');
|
||||||
|
console.log(err.message);
|
||||||
|
return process.exit(1);
|
||||||
|
}
|
||||||
|
const args_array = [].slice.call(arguments, 1);
|
||||||
|
if (args_array.length) {
|
||||||
|
console.log.apply(console, args_array);
|
||||||
|
}
|
||||||
|
// now run the next command in the queue
|
||||||
|
process.nextTick(run);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
process.on('exit', (code) => {
|
||||||
|
if (!completed && !code) {
|
||||||
|
log.error('Completion callback never invoked!');
|
||||||
|
errorMessage();
|
||||||
|
process.exit(6);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on('uncaughtException', (err) => {
|
||||||
|
log.error('UNCAUGHT EXCEPTION');
|
||||||
|
log.error('stack', err.stack);
|
||||||
|
errorMessage();
|
||||||
|
process.exit(7);
|
||||||
|
});
|
||||||
|
|
||||||
|
function errorMessage() {
|
||||||
|
// copied from npm's lib/util/error-handler.js
|
||||||
|
const os = require('os');
|
||||||
|
log.error('System', os.type() + ' ' + os.release());
|
||||||
|
log.error('command', process.argv.map(JSON.stringify).join(' '));
|
||||||
|
log.error('cwd', process.cwd());
|
||||||
|
log.error('node -v', process.version);
|
||||||
|
log.error(process.title + ' -v', 'v' + prog.package.version);
|
||||||
|
}
|
||||||
|
|
||||||
|
// start running the given commands!
|
||||||
|
run();
|
||||||
309
node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js
generated
vendored
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = exports;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// load mocking control function for accessing s3 via https. the function is a noop always returning
|
||||||
|
// false if not mocking.
|
||||||
|
exports.mockS3Http = require('./util/s3_setup').get_mockS3Http();
|
||||||
|
exports.mockS3Http('on');
|
||||||
|
const mocking = exports.mockS3Http('get');
|
||||||
|
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const nopt = require('nopt');
|
||||||
|
const log = require('npmlog');
|
||||||
|
log.disableProgress();
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
|
||||||
|
const EE = require('events').EventEmitter;
|
||||||
|
const inherits = require('util').inherits;
|
||||||
|
const cli_commands = [
|
||||||
|
'clean',
|
||||||
|
'install',
|
||||||
|
'reinstall',
|
||||||
|
'build',
|
||||||
|
'rebuild',
|
||||||
|
'package',
|
||||||
|
'testpackage',
|
||||||
|
'publish',
|
||||||
|
'unpublish',
|
||||||
|
'info',
|
||||||
|
'testbinary',
|
||||||
|
'reveal',
|
||||||
|
'configure'
|
||||||
|
];
|
||||||
|
const aliases = {};
|
||||||
|
|
||||||
|
// differentiate node-pre-gyp's logs from npm's
|
||||||
|
log.heading = 'node-pre-gyp';
|
||||||
|
|
||||||
|
if (mocking) {
|
||||||
|
log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// this is a getter to avoid circular reference warnings with node v14.
|
||||||
|
Object.defineProperty(exports, 'find', {
|
||||||
|
get: function() {
|
||||||
|
return require('./pre-binding').find;
|
||||||
|
},
|
||||||
|
enumerable: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// in the following, "my_module" is using node-pre-gyp to
|
||||||
|
// prebuild and install pre-built binaries. "main_module"
|
||||||
|
// is using "my_module".
|
||||||
|
//
|
||||||
|
// "bin/node-pre-gyp" invokes Run() without a path. the
|
||||||
|
// expectation is that the working directory is the package
|
||||||
|
// root "my_module". this is true because in all cases npm is
|
||||||
|
// executing a script in the context of "my_module".
|
||||||
|
//
|
||||||
|
// "pre-binding.find()" is executed by "my_module" but in the
|
||||||
|
// context of "main_module". this is because "main_module" is
|
||||||
|
// executing and requires "my_module" which is then executing
|
||||||
|
// "pre-binding.find()" via "node-pre-gyp.find()", so the working
|
||||||
|
// directory is that of "main_module".
|
||||||
|
//
|
||||||
|
// that's why "find()" must pass the path to package.json.
|
||||||
|
//
|
||||||
|
function Run({ package_json_path = './package.json', argv }) {
|
||||||
|
this.package_json_path = package_json_path;
|
||||||
|
this.commands = {};
|
||||||
|
|
||||||
|
const self = this;
|
||||||
|
cli_commands.forEach((command) => {
|
||||||
|
self.commands[command] = function(argvx, callback) {
|
||||||
|
log.verbose('command', command, argvx);
|
||||||
|
return require('./' + command)(self, argvx, callback);
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
this.parseArgv(argv);
|
||||||
|
|
||||||
|
// this is set to true after the binary.host property was set to
|
||||||
|
// either staging_host or production_host.
|
||||||
|
this.binaryHostSet = false;
|
||||||
|
}
|
||||||
|
inherits(Run, EE);
|
||||||
|
exports.Run = Run;
|
||||||
|
const proto = Run.prototype;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Export the contents of the package.json.
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.package = require('../package.json');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* nopt configuration definitions
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.configDefs = {
|
||||||
|
help: Boolean, // everywhere
|
||||||
|
arch: String, // 'configure'
|
||||||
|
debug: Boolean, // 'build'
|
||||||
|
directory: String, // bin
|
||||||
|
proxy: String, // 'install'
|
||||||
|
loglevel: String // everywhere
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* nopt shorthands
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.shorthands = {
|
||||||
|
release: '--no-debug',
|
||||||
|
C: '--directory',
|
||||||
|
debug: '--debug',
|
||||||
|
j: '--jobs',
|
||||||
|
silent: '--loglevel=silent',
|
||||||
|
silly: '--loglevel=silly',
|
||||||
|
verbose: '--loglevel=verbose'
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* expose the command aliases for the bin file to use.
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.aliases = aliases;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses the given argv array and sets the 'opts', 'argv',
|
||||||
|
* 'command', and 'package_json' properties.
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.parseArgv = function parseOpts(argv) {
|
||||||
|
this.opts = nopt(this.configDefs, this.shorthands, argv);
|
||||||
|
this.argv = this.opts.argv.remain.slice();
|
||||||
|
const commands = this.todo = [];
|
||||||
|
|
||||||
|
// create a copy of the argv array with aliases mapped
|
||||||
|
argv = this.argv.map((arg) => {
|
||||||
|
// is this an alias?
|
||||||
|
if (arg in this.aliases) {
|
||||||
|
arg = this.aliases[arg];
|
||||||
|
}
|
||||||
|
return arg;
|
||||||
|
});
|
||||||
|
|
||||||
|
// process the mapped args into "command" objects ("name" and "args" props)
|
||||||
|
argv.slice().forEach((arg) => {
|
||||||
|
if (arg in this.commands) {
|
||||||
|
const args = argv.splice(0, argv.indexOf(arg));
|
||||||
|
argv.shift();
|
||||||
|
if (commands.length > 0) {
|
||||||
|
commands[commands.length - 1].args = args;
|
||||||
|
}
|
||||||
|
commands.push({ name: arg, args: [] });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (commands.length > 0) {
|
||||||
|
commands[commands.length - 1].args = argv.splice(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// if a directory was specified package.json is assumed to be relative
|
||||||
|
// to it.
|
||||||
|
let package_json_path = this.package_json_path;
|
||||||
|
if (this.opts.directory) {
|
||||||
|
package_json_path = path.join(this.opts.directory, package_json_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.package_json = JSON.parse(fs.readFileSync(package_json_path));
|
||||||
|
|
||||||
|
// expand commands entries for multiple napi builds
|
||||||
|
this.todo = napi.expand_commands(this.package_json, this.opts, commands);
|
||||||
|
|
||||||
|
// support for inheriting config env variables from npm
|
||||||
|
const npm_config_prefix = 'npm_config_';
|
||||||
|
Object.keys(process.env).forEach((name) => {
|
||||||
|
if (name.indexOf(npm_config_prefix) !== 0) return;
|
||||||
|
const val = process.env[name];
|
||||||
|
if (name === npm_config_prefix + 'loglevel') {
|
||||||
|
log.level = val;
|
||||||
|
} else {
|
||||||
|
// add the user-defined options to the config
|
||||||
|
name = name.substring(npm_config_prefix.length);
|
||||||
|
// avoid npm argv clobber already present args
|
||||||
|
// which avoids problem of 'npm test' calling
|
||||||
|
// script that runs unique npm install commands
|
||||||
|
if (name === 'argv') {
|
||||||
|
if (this.opts.argv &&
|
||||||
|
this.opts.argv.remain &&
|
||||||
|
this.opts.argv.remain.length) {
|
||||||
|
// do nothing
|
||||||
|
} else {
|
||||||
|
this.opts[name] = val;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.opts[name] = val;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (this.opts.loglevel) {
|
||||||
|
log.level = this.opts.loglevel;
|
||||||
|
}
|
||||||
|
log.resume();
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* allow the binary.host property to be set at execution time.
|
||||||
|
*
|
||||||
|
* for this to take effect requires all the following to be true.
|
||||||
|
* - binary is a property in package.json
|
||||||
|
* - binary.host is falsey
|
||||||
|
* - binary.staging_host is not empty
|
||||||
|
* - binary.production_host is not empty
|
||||||
|
*
|
||||||
|
* if any of the previous checks fail then the function returns an empty string
|
||||||
|
* and makes no changes to package.json's binary property.
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* if command is "publish" then the default is set to "binary.staging_host"
|
||||||
|
* if command is not "publish" then the default is set to "binary.production_host"
|
||||||
|
*
|
||||||
|
* if the command-line option '--s3_host' is set to "staging" or "production" then
|
||||||
|
* "binary.host" is set to the specified "staging_host" or "production_host". if
|
||||||
|
* '--s3_host' is any other value an exception is thrown.
|
||||||
|
*
|
||||||
|
* if '--s3_host' is not present then "binary.host" is set to the default as above.
|
||||||
|
*
|
||||||
|
* this strategy was chosen so that any command other than "publish" or "unpublish" uses "production"
|
||||||
|
* as the default without requiring any command-line options but that "publish" and "unpublish" require
|
||||||
|
* '--s3_host production_host' to be specified in order to *really* publish (or unpublish). publishing
|
||||||
|
* to staging can be done freely without worrying about disturbing any production releases.
|
||||||
|
*/
|
||||||
|
proto.setBinaryHostProperty = function(command) {
|
||||||
|
if (this.binaryHostSet) {
|
||||||
|
return this.package_json.binary.host;
|
||||||
|
}
|
||||||
|
const p = this.package_json;
|
||||||
|
// don't set anything if host is present. it must be left blank to trigger this.
|
||||||
|
if (!p || !p.binary || p.binary.host) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
// and both staging and production must be present. errors will be reported later.
|
||||||
|
if (!p.binary.staging_host || !p.binary.production_host) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
let target = 'production_host';
|
||||||
|
if (command === 'publish' || command === 'unpublish') {
|
||||||
|
target = 'staging_host';
|
||||||
|
}
|
||||||
|
// the environment variable has priority over the default or the command line. if
|
||||||
|
// either the env var or the command line option are invalid throw an error.
|
||||||
|
const npg_s3_host = process.env.node_pre_gyp_s3_host;
|
||||||
|
if (npg_s3_host === 'staging' || npg_s3_host === 'production') {
|
||||||
|
target = `${npg_s3_host}_host`;
|
||||||
|
} else if (this.opts['s3_host'] === 'staging' || this.opts['s3_host'] === 'production') {
|
||||||
|
target = `${this.opts['s3_host']}_host`;
|
||||||
|
} else if (this.opts['s3_host'] || npg_s3_host) {
|
||||||
|
throw new Error(`invalid s3_host ${this.opts['s3_host'] || npg_s3_host}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
p.binary.host = p.binary[target];
|
||||||
|
this.binaryHostSet = true;
|
||||||
|
|
||||||
|
return p.binary.host;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the usage instructions for node-pre-gyp.
|
||||||
|
*/
|
||||||
|
|
||||||
|
proto.usage = function usage() {
|
||||||
|
const str = [
|
||||||
|
'',
|
||||||
|
' Usage: node-pre-gyp <command> [options]',
|
||||||
|
'',
|
||||||
|
' where <command> is one of:',
|
||||||
|
cli_commands.map((c) => {
|
||||||
|
return ' - ' + c + ' - ' + require('./' + c).usage;
|
||||||
|
}).join('\n'),
|
||||||
|
'',
|
||||||
|
'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
|
||||||
|
'node@' + process.versions.node
|
||||||
|
].join('\n');
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Version number getter.
|
||||||
|
*/
|
||||||
|
|
||||||
|
Object.defineProperty(proto, 'version', {
|
||||||
|
get: function() {
|
||||||
|
return this.package.version;
|
||||||
|
},
|
||||||
|
enumerable: true
|
||||||
|
});
|
||||||
73
node_modules/@mapbox/node-pre-gyp/lib/package.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = _package;
|
||||||
|
|
||||||
|
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball';
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('npmlog');
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
const existsAsync = fs.exists || path.exists;
|
||||||
|
const makeDir = require('make-dir');
|
||||||
|
const tar = require('tar');
|
||||||
|
|
||||||
|
function readdirSync(dir) {
|
||||||
|
let list = [];
|
||||||
|
const files = fs.readdirSync(dir);
|
||||||
|
|
||||||
|
files.forEach((file) => {
|
||||||
|
const stats = fs.lstatSync(path.join(dir, file));
|
||||||
|
if (stats.isDirectory()) {
|
||||||
|
list = list.concat(readdirSync(path.join(dir, file)));
|
||||||
|
} else {
|
||||||
|
list.push(path.join(dir, file));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
function _package(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
const from = opts.module_path;
|
||||||
|
const binary_module = path.join(from, opts.module_name + '.node');
|
||||||
|
existsAsync(binary_module, (found) => {
|
||||||
|
if (!found) {
|
||||||
|
return callback(new Error('Cannot package because ' + binary_module + ' missing: run `node-pre-gyp rebuild` first'));
|
||||||
|
}
|
||||||
|
const tarball = opts.staged_tarball;
|
||||||
|
const filter_func = function(entry) {
|
||||||
|
const basename = path.basename(entry);
|
||||||
|
if (basename.length && basename[0] !== '.') {
|
||||||
|
console.log('packing ' + entry);
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
console.log('skipping ' + entry);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
makeDir(path.dirname(tarball)).then(() => {
|
||||||
|
let files = readdirSync(from);
|
||||||
|
const base = path.basename(from);
|
||||||
|
files = files.map((file) => {
|
||||||
|
return path.join(base, path.relative(from, file));
|
||||||
|
});
|
||||||
|
tar.create({
|
||||||
|
portable: false,
|
||||||
|
gzip: true,
|
||||||
|
filter: filter_func,
|
||||||
|
file: tarball,
|
||||||
|
cwd: path.dirname(from)
|
||||||
|
}, files, (err2) => {
|
||||||
|
if (err2) console.error('[' + package_json.name + '] ' + err2.message);
|
||||||
|
else log.info('package', 'Binary staged at "' + tarball + '"');
|
||||||
|
return callback(err2);
|
||||||
|
});
|
||||||
|
}).catch((err) => {
|
||||||
|
return callback(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
34
node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
const npg = require('..');
|
||||||
|
const versioning = require('../lib/util/versioning.js');
|
||||||
|
const napi = require('../lib/util/napi.js');
|
||||||
|
const existsSync = require('fs').existsSync || require('path').existsSync;
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
module.exports = exports;
|
||||||
|
|
||||||
|
exports.usage = 'Finds the require path for the node-pre-gyp installed module';
|
||||||
|
|
||||||
|
exports.validate = function(package_json, opts) {
|
||||||
|
versioning.validate_config(package_json, opts);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.find = function(package_json_path, opts) {
|
||||||
|
if (!existsSync(package_json_path)) {
|
||||||
|
throw new Error(package_json_path + ' does not exist');
|
||||||
|
}
|
||||||
|
const prog = new npg.Run({ package_json_path, argv: process.argv });
|
||||||
|
prog.setBinaryHostProperty();
|
||||||
|
const package_json = prog.package_json;
|
||||||
|
|
||||||
|
versioning.validate_config(package_json, opts);
|
||||||
|
let napi_build_version;
|
||||||
|
if (napi.get_napi_build_versions(package_json, opts)) {
|
||||||
|
napi_build_version = napi.get_best_napi_build_version(package_json, opts);
|
||||||
|
}
|
||||||
|
opts = opts || {};
|
||||||
|
if (!opts.module_root) opts.module_root = path.dirname(package_json_path);
|
||||||
|
const meta = versioning.evaluate(package_json, opts, napi_build_version);
|
||||||
|
return meta.module;
|
||||||
|
};
|
||||||
81
node_modules/@mapbox/node-pre-gyp/lib/publish.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = publish;
|
||||||
|
|
||||||
|
exports.usage = 'Publishes pre-built binary (requires aws-sdk)';
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('npmlog');
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
const s3_setup = require('./util/s3_setup.js');
|
||||||
|
const existsAsync = fs.exists || path.exists;
|
||||||
|
const url = require('url');
|
||||||
|
|
||||||
|
function publish(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
const tarball = opts.staged_tarball;
|
||||||
|
existsAsync(tarball, (found) => {
|
||||||
|
if (!found) {
|
||||||
|
return callback(new Error('Cannot publish because ' + tarball + ' missing: run `node-pre-gyp package` first'));
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info('publish', 'Detecting s3 credentials');
|
||||||
|
const config = {};
|
||||||
|
s3_setup.detect(opts, config);
|
||||||
|
const s3 = s3_setup.get_s3(config);
|
||||||
|
|
||||||
|
const key_name = url.resolve(config.prefix, opts.package_name);
|
||||||
|
const s3_opts = {
|
||||||
|
Bucket: config.bucket,
|
||||||
|
Key: key_name
|
||||||
|
};
|
||||||
|
log.info('publish', 'Authenticating with s3');
|
||||||
|
log.info('publish', config);
|
||||||
|
|
||||||
|
log.info('publish', 'Checking for existing binary at ' + opts.hosted_path);
|
||||||
|
s3.headObject(s3_opts, (err, meta) => {
|
||||||
|
if (meta) log.info('publish', JSON.stringify(meta));
|
||||||
|
if (err && err.code === 'NotFound') {
|
||||||
|
// we are safe to publish because
|
||||||
|
// the object does not already exist
|
||||||
|
log.info('publish', 'Preparing to put object');
|
||||||
|
const s3_put_opts = {
|
||||||
|
ACL: 'public-read',
|
||||||
|
Body: fs.createReadStream(tarball),
|
||||||
|
Key: key_name,
|
||||||
|
Bucket: config.bucket
|
||||||
|
};
|
||||||
|
log.info('publish', 'Putting object', s3_put_opts.ACL, s3_put_opts.Bucket, s3_put_opts.Key);
|
||||||
|
try {
|
||||||
|
s3.putObject(s3_put_opts, (err2, resp) => {
|
||||||
|
log.info('publish', 'returned from putting object');
|
||||||
|
if (err2) {
|
||||||
|
log.info('publish', 's3 putObject error: "' + err2 + '"');
|
||||||
|
return callback(err2);
|
||||||
|
}
|
||||||
|
if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
|
||||||
|
log.info('publish', 'successfully put object');
|
||||||
|
console.log('[' + package_json.name + '] published to ' + opts.hosted_path);
|
||||||
|
return callback();
|
||||||
|
});
|
||||||
|
} catch (err3) {
|
||||||
|
log.info('publish', 's3 putObject error: "' + err3 + '"');
|
||||||
|
return callback(err3);
|
||||||
|
}
|
||||||
|
} else if (err) {
|
||||||
|
log.info('publish', 's3 headObject error: "' + err + '"');
|
||||||
|
return callback(err);
|
||||||
|
} else {
|
||||||
|
log.error('publish', 'Cannot publish over existing version');
|
||||||
|
log.error('publish', "Update the 'version' field in package.json and try again");
|
||||||
|
log.error('publish', 'If the previous version was published in error see:');
|
||||||
|
log.error('publish', '\t node-pre-gyp unpublish');
|
||||||
|
return callback(new Error('Failed publishing to ' + opts.hosted_path));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
20
node_modules/@mapbox/node-pre-gyp/lib/rebuild.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = rebuild;
|
||||||
|
|
||||||
|
exports.usage = 'Runs "clean" and "build" at once';
|
||||||
|
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
|
||||||
|
function rebuild(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
let commands = [
|
||||||
|
{ name: 'clean', args: [] },
|
||||||
|
{ name: 'build', args: ['rebuild'] }
|
||||||
|
];
|
||||||
|
commands = napi.expand_commands(package_json, gyp.opts, commands);
|
||||||
|
for (let i = commands.length; i !== 0; i--) {
|
||||||
|
gyp.todo.unshift(commands[i - 1]);
|
||||||
|
}
|
||||||
|
process.nextTick(callback);
|
||||||
|
}
|
||||||
19
node_modules/@mapbox/node-pre-gyp/lib/reinstall.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = rebuild;
|
||||||
|
|
||||||
|
exports.usage = 'Runs "clean" and "install" at once';
|
||||||
|
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
|
||||||
|
function rebuild(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
let installArgs = [];
|
||||||
|
const napi_build_version = napi.get_best_napi_build_version(package_json, gyp.opts);
|
||||||
|
if (napi_build_version != null) installArgs = [napi.get_command_arg(napi_build_version)];
|
||||||
|
gyp.todo.unshift(
|
||||||
|
{ name: 'clean', args: [] },
|
||||||
|
{ name: 'install', args: installArgs }
|
||||||
|
);
|
||||||
|
process.nextTick(callback);
|
||||||
|
}
|
||||||
32
node_modules/@mapbox/node-pre-gyp/lib/reveal.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = reveal;
|
||||||
|
|
||||||
|
exports.usage = 'Reveals data on the versioned binary';
|
||||||
|
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
|
||||||
|
function unix_paths(key, val) {
|
||||||
|
return val && val.replace ? val.replace(/\\/g, '/') : val;
|
||||||
|
}
|
||||||
|
|
||||||
|
function reveal(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
let hit = false;
|
||||||
|
// if a second arg is passed look to see
|
||||||
|
// if it is a known option
|
||||||
|
// console.log(JSON.stringify(gyp.opts,null,1))
|
||||||
|
const remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length - 1];
|
||||||
|
if (remain && Object.hasOwnProperty.call(opts, remain)) {
|
||||||
|
console.log(opts[remain].replace(/\\/g, '/'));
|
||||||
|
hit = true;
|
||||||
|
}
|
||||||
|
// otherwise return all options as json
|
||||||
|
if (!hit) {
|
||||||
|
console.log(JSON.stringify(opts, unix_paths, 2));
|
||||||
|
}
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
79
node_modules/@mapbox/node-pre-gyp/lib/testbinary.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = testbinary;
|
||||||
|
|
||||||
|
exports.usage = 'Tests that the binary.node can be required';
|
||||||
|
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('npmlog');
|
||||||
|
const cp = require('child_process');
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
|
||||||
|
function testbinary(gyp, argv, callback) {
|
||||||
|
const args = [];
|
||||||
|
const options = {};
|
||||||
|
let shell_cmd = process.execPath;
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
// skip validation for runtimes we don't explicitly support (like electron)
|
||||||
|
if (opts.runtime &&
|
||||||
|
opts.runtime !== 'node-webkit' &&
|
||||||
|
opts.runtime !== 'node') {
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
const nw = (opts.runtime && opts.runtime === 'node-webkit');
|
||||||
|
// ensure on windows that / are used for require path
|
||||||
|
const binary_module = opts.module.replace(/\\/g, '/');
|
||||||
|
if ((process.arch !== opts.target_arch) ||
|
||||||
|
(process.platform !== opts.target_platform)) {
|
||||||
|
let msg = 'skipping validation since host platform/arch (';
|
||||||
|
msg += process.platform + '/' + process.arch + ')';
|
||||||
|
msg += ' does not match target (';
|
||||||
|
msg += opts.target_platform + '/' + opts.target_arch + ')';
|
||||||
|
log.info('validate', msg);
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
if (nw) {
|
||||||
|
options.timeout = 5000;
|
||||||
|
if (process.platform === 'darwin') {
|
||||||
|
shell_cmd = 'node-webkit';
|
||||||
|
} else if (process.platform === 'win32') {
|
||||||
|
shell_cmd = 'nw.exe';
|
||||||
|
} else {
|
||||||
|
shell_cmd = 'nw';
|
||||||
|
}
|
||||||
|
const modulePath = path.resolve(binary_module);
|
||||||
|
const appDir = path.join(__dirname, 'util', 'nw-pre-gyp');
|
||||||
|
args.push(appDir);
|
||||||
|
args.push(modulePath);
|
||||||
|
log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
|
||||||
|
cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => {
|
||||||
|
// check for normal timeout for node-webkit
|
||||||
|
if (err) {
|
||||||
|
if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) {
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
const stderrLog = stderr.toString();
|
||||||
|
log.info('stderr', stderrLog);
|
||||||
|
if (/^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog)) {
|
||||||
|
log.info('RANDR', 'stderr contains only RANDR error, ignored');
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return callback();
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
args.push('--eval');
|
||||||
|
args.push("require('" + binary_module.replace(/'/g, '\'') + "')");
|
||||||
|
log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
|
||||||
|
cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err, { stdout: stdout, stderr: stderr });
|
||||||
|
}
|
||||||
|
return callback();
|
||||||
|
});
|
||||||
|
}
|
||||||
53
node_modules/@mapbox/node-pre-gyp/lib/testpackage.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = exports = testpackage;
|
||||||
|
|
||||||
|
exports.usage = 'Tests that the staged package is valid';
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const log = require('npmlog');
|
||||||
|
const existsAsync = fs.exists || path.exists;
|
||||||
|
const versioning = require('./util/versioning.js');
|
||||||
|
const napi = require('./util/napi.js');
|
||||||
|
const testbinary = require('./testbinary.js');
|
||||||
|
const tar = require('tar');
|
||||||
|
const makeDir = require('make-dir');
|
||||||
|
|
||||||
|
function testpackage(gyp, argv, callback) {
|
||||||
|
const package_json = gyp.package_json;
|
||||||
|
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
||||||
|
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
||||||
|
const tarball = opts.staged_tarball;
|
||||||
|
existsAsync(tarball, (found) => {
|
||||||
|
if (!found) {
|
||||||
|
return callback(new Error('Cannot test package because ' + tarball + ' missing: run `node-pre-gyp package` first'));
|
||||||
|
}
|
||||||
|
const to = opts.module_path;
|
||||||
|
function filter_func(entry) {
|
||||||
|
log.info('install', 'unpacking [' + entry.path + ']');
|
||||||
|
}
|
||||||
|
|
||||||
|
makeDir(to).then(() => {
|
||||||
|
tar.extract({
|
||||||
|
file: tarball,
|
||||||
|
cwd: to,
|
||||||
|
strip: 1,
|
||||||
|
onentry: filter_func
|
||||||
|
}).then(after_extract, callback);
|
||||||
|
}).catch((err) => {
|
||||||
|
return callback(err);
|
||||||
|
});
|
||||||
|
|
||||||
|
function after_extract() {
|
||||||
|
testbinary(gyp, argv, (err) => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
} else {
|
||||||
|
console.log('[' + package_json.name + '] Package appears valid');
|
||||||
|
return callback();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
41  node_modules/@mapbox/node-pre-gyp/lib/unpublish.js  (generated, vendored, new file)
@@ -0,0 +1,41 @@
'use strict';

module.exports = exports = unpublish;

exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)';

const log = require('npmlog');
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const s3_setup = require('./util/s3_setup.js');
const url = require('url');

function unpublish(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  const config = {};
  s3_setup.detect(opts, config);
  const s3 = s3_setup.get_s3(config);
  const key_name = url.resolve(config.prefix, opts.package_name);
  const s3_opts = {
    Bucket: config.bucket,
    Key: key_name
  };
  s3.headObject(s3_opts, (err, meta) => {
    if (err && err.code === 'NotFound') {
      console.log('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
      return callback();
    } else if (err) {
      return callback(err);
    } else {
      log.info('unpublish', JSON.stringify(meta));
      s3.deleteObject(s3_opts, (err2, resp) => {
        if (err2) return callback(err2);
        log.info(JSON.stringify(resp));
        console.log('[' + package_json.name + '] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
        return callback();
      });
    }
  });
}
2602  node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json  (generated, vendored, new file)
File diff suppressed because it is too large.
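The crosswalk itself is too large to show in the diff. Based on how versioning.js (further below) reads it, each entry maps a Node version to its module ABI and V8 versions. The excerpt below is a hypothetical sketch for orientation only; the specific versions and numbers are assumptions, not taken from this diff.

// Hypothetical shape of an abi_crosswalk.json entry, inferred from the
// cross_obj.node_abi and cross_obj.v8 lookups in versioning.js below.
const abi_crosswalk = {
  '0.10.33': { node_abi: 11, v8: '3.14' },
  '4.0.0':   { node_abi: 46, v8: '4.5' }
};
console.log(abi_crosswalk['4.0.0'].node_abi); // 46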
93  node_modules/@mapbox/node-pre-gyp/lib/util/compile.js  (generated, vendored, new file)
@@ -0,0 +1,93 @@
'use strict';

module.exports = exports;

const fs = require('fs');
const path = require('path');
const win = process.platform === 'win32';
const existsSync = fs.existsSync || path.existsSync;
const cp = require('child_process');

// try to build up the complete path to node-gyp
/* priority:
  - node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
  - node-gyp on NODE_PATH
  - node-gyp inside npm on NODE_PATH (ignore on iojs)
  - node-gyp inside npm beside node exe
*/
function which_node_gyp() {
  let node_gyp_bin;
  if (process.env.npm_config_node_gyp) {
    try {
      node_gyp_bin = process.env.npm_config_node_gyp;
      if (existsSync(node_gyp_bin)) {
        return node_gyp_bin;
      }
    } catch (err) {
      // do nothing
    }
  }
  try {
    const node_gyp_main = require.resolve('node-gyp'); // eslint-disable-line node/no-missing-require
    node_gyp_bin = path.join(path.dirname(
      path.dirname(node_gyp_main)),
    'bin/node-gyp.js');
    if (existsSync(node_gyp_bin)) {
      return node_gyp_bin;
    }
  } catch (err) {
    // do nothing
  }
  if (process.execPath.indexOf('iojs') === -1) {
    try {
      const npm_main = require.resolve('npm'); // eslint-disable-line node/no-missing-require
      node_gyp_bin = path.join(path.dirname(
        path.dirname(npm_main)),
      'node_modules/node-gyp/bin/node-gyp.js');
      if (existsSync(node_gyp_bin)) {
        return node_gyp_bin;
      }
    } catch (err) {
      // do nothing
    }
  }
  const npm_base = path.join(path.dirname(
    path.dirname(process.execPath)),
  'lib/node_modules/npm/');
  node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js');
  if (existsSync(node_gyp_bin)) {
    return node_gyp_bin;
  }
}

module.exports.run_gyp = function(args, opts, callback) {
  let shell_cmd = '';
  const cmd_args = [];
  if (opts.runtime && opts.runtime === 'node-webkit') {
    shell_cmd = 'nw-gyp';
    if (win) shell_cmd += '.cmd';
  } else {
    const node_gyp_path = which_node_gyp();
    if (node_gyp_path) {
      shell_cmd = process.execPath;
      cmd_args.push(node_gyp_path);
    } else {
      shell_cmd = 'node-gyp';
      if (win) shell_cmd += '.cmd';
    }
  }
  const final_args = cmd_args.concat(args);
  const cmd = cp.spawn(shell_cmd, final_args, { cwd: undefined, env: process.env, stdio: [0, 1, 2] });
  cmd.on('error', (err) => {
    if (err) {
      return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ')'));
    }
    callback(null, opts);
  });
  cmd.on('close', (code) => {
    if (code && code !== 0) {
      return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ')'));
    }
    callback(null, opts);
  });
};
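A hedged usage sketch of run_gyp as defined above; the command list and the options object are illustrative assumptions based on the signature shown in the code, not the exact way node-pre-gyp's own commands drive it.

// Sketch: drive node-gyp through the wrapper above.
// The 'configure'/'build' args and the opts fields are assumptions.
const compile = require('./node_modules/@mapbox/node-pre-gyp/lib/util/compile.js');

compile.run_gyp(['configure', 'build'], { runtime: 'node' }, (err, opts) => {
  if (err) {
    console.error('node-gyp failed:', err.message);
    return;
  }
  console.log('node-gyp finished for runtime:', opts.runtime);
});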
102  node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js  (generated, vendored, new file)
@@ -0,0 +1,102 @@
'use strict';

module.exports = exports = handle_gyp_opts;

const versioning = require('./versioning.js');
const napi = require('./napi.js');

/*

Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp.

We massage the args and options slightly to account for differences in what commands mean between
node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below)

Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether
node-pre-gyp is called directly, or if it is called in a `run-script` phase of npm.

We also try to preserve any command line options that might have been passed to npm or node-pre-gyp.
But this is fairly difficult without passing way too much through. For example `gyp.opts` contains all
the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have
to be very selective about what we pass through.

For example:

`npm install --build-from-source` will give:

argv == [ 'rebuild' ]
gyp.opts.argv == { remain: [ 'install' ],
                   cooked: [ 'install', '--fallback-to-build' ],
                   original: [ 'install', '--fallback-to-build' ] }

`./bin/node-pre-gyp build` will give:

argv == []
gyp.opts.argv == { remain: [ 'build' ],
                   cooked: [ 'build' ],
                   original: [ '-C', 'test/app1', 'build' ] }

*/

// select set of node-pre-gyp versioning info
// to share with node-gyp
const share_with_node_gyp = [
  'module',
  'module_name',
  'module_path',
  'napi_version',
  'node_abi_napi',
  'napi_build_version',
  'node_napi_label'
];

function handle_gyp_opts(gyp, argv, callback) {

  // Collect node-pre-gyp specific variables to pass to node-gyp
  const node_pre_gyp_options = [];
  // generate custom node-pre-gyp versioning info
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(gyp.package_json, gyp.opts, napi_build_version);
  share_with_node_gyp.forEach((key) => {
    const val = opts[key];
    if (val) {
      node_pre_gyp_options.push('--' + key + '=' + val);
    } else if (key === 'napi_build_version') {
      node_pre_gyp_options.push('--' + key + '=0');
    } else {
      if (key !== 'napi_version' && key !== 'node_abi_napi')
        return callback(new Error('Option ' + key + ' required but not found by node-pre-gyp'));
    }
  });

  // Collect options that follow the special -- which disables nopt parsing
  const unparsed_options = [];
  let double_hyphen_found = false;
  gyp.opts.argv.original.forEach((opt) => {
    if (double_hyphen_found) {
      unparsed_options.push(opt);
    }
    if (opt === '--') {
      double_hyphen_found = true;
    }
  });

  // We try to respect and pass through remaining command
  // line options (like --foo=bar) to node-gyp
  const cooked = gyp.opts.argv.cooked;
  const node_gyp_options = [];
  cooked.forEach((value) => {
    if (value.length > 2 && value.slice(0, 2) === '--') {
      const key = value.slice(2);
      const val = cooked[cooked.indexOf(value) + 1];
      if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar']
        node_gyp_options.push('--' + key + '=' + val);
      } else { // pass through --foo
        node_gyp_options.push(value);
      }
    }
  });

  const result = { 'opts': opts, 'gyp': node_gyp_options, 'pre': node_pre_gyp_options, 'unparsed': unparsed_options };
  return callback(null, result);
}
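The callback above receives a single result object with `opts`, `gyp`, `pre`, and `unparsed` fields. The sketch below shows how a caller might flatten that object into a node-gyp argument list; the final command assembly is an assumption for illustration, not code from this diff.

// Sketch: turning the handle_gyp_opts result into a node-gyp arg list.
const handle_gyp_opts = require('./node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js');

function build_node_gyp_args(gyp, argv, callback) { // gyp/argv as supplied by the node-pre-gyp CLI
  handle_gyp_opts(gyp, argv, (err, result) => {
    if (err) return callback(err);
    // result == { opts, gyp: [...], pre: [...], unparsed: [...] }
    const final_args = ['rebuild'].concat(result.gyp, result.pre);
    if (result.unparsed.length > 0) {
      final_args.push('--');
      result.unparsed.forEach((opt) => final_args.push(opt));
    }
    return callback(null, final_args);
  });
}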
205  node_modules/@mapbox/node-pre-gyp/lib/util/napi.js  (generated, vendored, new file)
@@ -0,0 +1,205 @@
'use strict';

const fs = require('fs');

module.exports = exports;

const versionArray = process.version
  .substr(1)
  .replace(/-.*$/, '')
  .split('.')
  .map((item) => {
    return +item;
  });

const napi_multiple_commands = [
  'build',
  'clean',
  'configure',
  'package',
  'publish',
  'reveal',
  'testbinary',
  'testpackage',
  'unpublish'
];

const napi_build_version_tag = 'napi_build_version=';

module.exports.get_napi_version = function() {
  // returns the non-zero numeric napi version or undefined if napi is not supported.
  // correctly supporting target requires an updated cross-walk
  let version = process.versions.napi; // can be undefined
  if (!version) { // this code should never need to be updated
    if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+
    else if (versionArray[0] === 8) version = 1; // 8.0.0+
  }
  return version;
};

module.exports.get_napi_version_as_string = function(target) {
  // returns the napi version as a string or an empty string if napi is not supported.
  const version = module.exports.get_napi_version(target);
  return version ? '' + version : '';
};

module.exports.validate_package_json = function(package_json, opts) { // throws Error

  const binary = package_json.binary;
  const module_path_ok = pathOK(binary.module_path);
  const remote_path_ok = pathOK(binary.remote_path);
  const package_name_ok = pathOK(binary.package_name);
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts, true);
  const napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json);

  if (napi_build_versions) {
    napi_build_versions.forEach((napi_build_version)=> {
      if (!(parseInt(napi_build_version, 10) === napi_build_version && napi_build_version > 0)) {
        throw new Error('All values specified in napi_versions must be positive integers.');
      }
    });
  }

  if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) {
    throw new Error('When napi_versions is specified; module_path and either remote_path or ' +
      "package_name must contain the substitution string '{napi_build_version}`.");
  }

  if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) {
    throw new Error("When the substitution string '{napi_build_version}` is specified in " +
      'module_path, remote_path, or package_name; napi_versions must also be specified.');
  }

  if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) &&
    module.exports.build_napi_only(package_json)) {
    throw new Error(
      'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
      'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
      'This Node instance cannot run this module.');
  }

  if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) {
    throw new Error(
      'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
      'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
      'This Node instance cannot run this module.');
  }

};

function pathOK(path) {
  return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1);
}

module.exports.expand_commands = function(package_json, opts, commands) {
  const expanded_commands = [];
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
  commands.forEach((command)=> {
    if (napi_build_versions && command.name === 'install') {
      const napi_build_version = module.exports.get_best_napi_build_version(package_json, opts);
      const args = napi_build_version ? [napi_build_version_tag + napi_build_version] : [];
      expanded_commands.push({ name: command.name, args: args });
    } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) {
      napi_build_versions.forEach((napi_build_version)=> {
        const args = command.args.slice();
        args.push(napi_build_version_tag + napi_build_version);
        expanded_commands.push({ name: command.name, args: args });
      });
    } else {
      expanded_commands.push(command);
    }
  });
  return expanded_commands;
};

module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined
  const log = require('npmlog');
  let napi_build_versions = [];
  const supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
  // remove duplicates, verify each napi version can actually be built
  if (package_json.binary && package_json.binary.napi_versions) {
    package_json.binary.napi_versions.forEach((napi_version) => {
      const duplicated = napi_build_versions.indexOf(napi_version) !== -1;
      if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) {
        napi_build_versions.push(napi_version);
      } else if (warnings && !duplicated && supported_napi_version) {
        log.info('This Node instance does not support builds for Node-API version', napi_version);
      }
    });
  }
  if (opts && opts['build-latest-napi-version-only']) {
    let latest_version = 0;
    napi_build_versions.forEach((napi_version) => {
      if (napi_version > latest_version) latest_version = napi_version;
    });
    napi_build_versions = latest_version ? [latest_version] : [];
  }
  return napi_build_versions.length ? napi_build_versions : undefined;
};

module.exports.get_napi_build_versions_raw = function(package_json) {
  const napi_build_versions = [];
  // remove duplicates
  if (package_json.binary && package_json.binary.napi_versions) {
    package_json.binary.napi_versions.forEach((napi_version) => {
      if (napi_build_versions.indexOf(napi_version) === -1) {
        napi_build_versions.push(napi_version);
      }
    });
  }
  return napi_build_versions.length ? napi_build_versions : undefined;
};

module.exports.get_command_arg = function(napi_build_version) {
  return napi_build_version_tag + napi_build_version;
};

module.exports.get_napi_build_version_from_command_args = function(command_args) {
  for (let i = 0; i < command_args.length; i++) {
    const arg = command_args[i];
    if (arg.indexOf(napi_build_version_tag) === 0) {
      return parseInt(arg.substr(napi_build_version_tag.length), 10);
    }
  }
  return undefined;
};

module.exports.swap_build_dir_out = function(napi_build_version) {
  if (napi_build_version) {
    const rm = require('rimraf');
    rm.sync(module.exports.get_build_dir(napi_build_version));
    fs.renameSync('build', module.exports.get_build_dir(napi_build_version));
  }
};

module.exports.swap_build_dir_in = function(napi_build_version) {
  if (napi_build_version) {
    const rm = require('rimraf');
    rm.sync('build');
    fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build');
  }
};

module.exports.get_build_dir = function(napi_build_version) {
  return 'build-tmp-napi-v' + napi_build_version;
};

module.exports.get_best_napi_build_version = function(package_json, opts) {
  let best_napi_build_version = 0;
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
  if (napi_build_versions) {
    const our_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
    napi_build_versions.forEach((napi_build_version)=> {
      if (napi_build_version > best_napi_build_version &&
        napi_build_version <= our_napi_version) {
        best_napi_build_version = napi_build_version;
      }
    });
  }
  return best_napi_build_version === 0 ? undefined : best_napi_build_version;
};

module.exports.build_napi_only = function(package_json) {
  return package_json.binary && package_json.binary.package_name &&
    package_json.binary.package_name.indexOf('{node_napi_label}') === -1;
};
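validate_package_json above enforces a specific package.json shape: positive-integer napi_versions plus the {napi_build_version} substitution string in module_path and in remote_path or package_name. The object below is a hypothetical example that would pass those checks; the names, versions, paths, and host are illustrative placeholders only.

// Hypothetical package.json "binary" block satisfying the checks above.
const example_package_json = {
  name: 'my-native-module',    // illustrative
  version: '1.0.0',
  main: 'index.js',
  binary: {
    module_name: 'my_native_module',
    module_path: './lib/binding/napi-v{napi_build_version}',
    package_name: '{module_name}-v{version}-napi-v{napi_build_version}-{platform}-{arch}.tar.gz',
    host: 'https://example-bucket.s3.amazonaws.com', // placeholder host
    napi_versions: [3, 6]
  }
};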
26  node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html  (generated, vendored, new file)
@@ -0,0 +1,26 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Node-webkit-based module test</title>
<script>
function nwModuleTest(){
  var util = require('util');
  var moduleFolder = require('nw.gui').App.argv[0];
  try {
    require(moduleFolder);
  } catch(e) {
    if( process.platform !== 'win32' ){
      util.log('nw-pre-gyp error:');
      util.log(e.stack);
    }
    process.exit(1);
  }
  process.exit(0);
}
</script>
</head>
<body onload="nwModuleTest()">
<h1>Node-webkit-based module test</h1>
</body>
</html>
9  node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json  (generated, vendored, new file)
@@ -0,0 +1,9 @@
{
  "main": "index.html",
  "name": "nw-pre-gyp-module-test",
  "description": "Node-webkit-based module test.",
  "version": "0.0.1",
  "window": {
    "show": false
  }
}
163  node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js  (generated, vendored, new file)
@@ -0,0 +1,163 @@
'use strict';

module.exports = exports;

const url = require('url');
const fs = require('fs');
const path = require('path');

module.exports.detect = function(opts, config) {
  const to = opts.hosted_path;
  const uri = url.parse(to);
  config.prefix = (!uri.pathname || uri.pathname === '/') ? '' : uri.pathname.replace('/', '');
  if (opts.bucket && opts.region) {
    config.bucket = opts.bucket;
    config.region = opts.region;
    config.endpoint = opts.host;
    config.s3ForcePathStyle = opts.s3ForcePathStyle;
  } else {
    const parts = uri.hostname.split('.s3');
    const bucket = parts[0];
    if (!bucket) {
      return;
    }
    if (!config.bucket) {
      config.bucket = bucket;
    }
    if (!config.region) {
      const region = parts[1].slice(1).split('.')[0];
      if (region === 'amazonaws') {
        config.region = 'us-east-1';
      } else {
        config.region = region;
      }
    }
  }
};

module.exports.get_s3 = function(config) {

  if (process.env.node_pre_gyp_mock_s3) {
    // here we're mocking. node_pre_gyp_mock_s3 is the scratch directory
    // for the mock code.
    const AWSMock = require('mock-aws-s3');
    const os = require('os');

    AWSMock.config.basePath = `${os.tmpdir()}/mock`;

    const s3 = AWSMock.S3();

    // wrapped callback maker. fs calls return code of ENOENT but AWS.S3 returns
    // NotFound.
    const wcb = (fn) => (err, ...args) => {
      if (err && err.code === 'ENOENT') {
        err.code = 'NotFound';
      }
      return fn(err, ...args);
    };

    return {
      listObjects(params, callback) {
        return s3.listObjects(params, wcb(callback));
      },
      headObject(params, callback) {
        return s3.headObject(params, wcb(callback));
      },
      deleteObject(params, callback) {
        return s3.deleteObject(params, wcb(callback));
      },
      putObject(params, callback) {
        return s3.putObject(params, wcb(callback));
      }
    };
  }

  // if not mocking then setup real s3.
  const AWS = require('aws-sdk');

  AWS.config.update(config);
  const s3 = new AWS.S3();

  // need to change if additional options need to be specified.
  return {
    listObjects(params, callback) {
      return s3.listObjects(params, callback);
    },
    headObject(params, callback) {
      return s3.headObject(params, callback);
    },
    deleteObject(params, callback) {
      return s3.deleteObject(params, callback);
    },
    putObject(params, callback) {
      return s3.putObject(params, callback);
    }
  };

};

//
// function to get the mocking control function. if not mocking it returns a no-op.
//
// if mocking it sets up the mock http interceptors that use the mocked s3 file system
// to fulfill responses.
module.exports.get_mockS3Http = function() {
  let mock_s3 = false;
  if (!process.env.node_pre_gyp_mock_s3) {
    return () => mock_s3;
  }

  const nock = require('nock');
  // the bucket used for testing, as addressed by https.
  const host = 'https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com';
  const mockDir = process.env.node_pre_gyp_mock_s3 + '/mapbox-node-pre-gyp-public-testing-bucket';

  // function to setup interceptors. they are "turned off" by setting mock_s3 to false.
  const mock_http = () => {
    // eslint-disable-next-line no-unused-vars
    function get(uri, requestBody) {
      const filepath = path.join(mockDir, uri.replace('%2B', '+'));

      try {
        fs.accessSync(filepath, fs.constants.R_OK);
      } catch (e) {
        return [404, 'not found\n'];
      }

      // the mock s3 functions just write to disk, so just read from it.
      return [200, fs.createReadStream(filepath)];
    }

    // eslint-disable-next-line no-unused-vars
    return nock(host)
      .persist()
      .get(() => mock_s3) // mock any uri for s3 when true
      .reply(get);
  };

  // setup interceptors. they check the mock_s3 flag to determine whether to intercept.
  mock_http(nock, host, mockDir);
  // function to turn matching all requests to s3 on/off.
  const mockS3Http = (action) => {
    const previous = mock_s3;
    if (action === 'off') {
      mock_s3 = false;
    } else if (action === 'on') {
      mock_s3 = true;
    } else if (action !== 'get') {
      throw new Error(`illegal action for setMockHttp ${action}`);
    }
    return previous;
  };

  // call mockS3Http with the argument
  // - 'on' - turn it on
  // - 'off' - turn it off (used by fetch.test.js so it doesn't interfere with redirects)
  // - 'get' - return true or false for 'on' or 'off'
  return mockS3Http;
};
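A brief sketch of how detect() and get_s3() above fit together (unpublish.js earlier in this diff does the same thing from real versioning opts). The hosted_path URL here is a placeholder assumption; the mock backend is selected purely by the node_pre_gyp_mock_s3 environment variable checked inside get_s3().

// Sketch: resolve bucket/region from a hosted path, then list objects.
const s3_setup = require('./node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js');

const config = {};
s3_setup.detect({ hosted_path: 'https://example-bucket.s3.us-east-1.amazonaws.com/prefix/' }, config);
// config now holds { prefix: 'prefix/', bucket: 'example-bucket', region: 'us-east-1' }

const s3 = s3_setup.get_s3(config);
s3.listObjects({ Bucket: config.bucket, Prefix: config.prefix }, (err, data) => {
  if (err) return console.error(err);
  console.log((data.Contents || []).map((obj) => obj.Key));
});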
335  node_modules/@mapbox/node-pre-gyp/lib/util/versioning.js  (generated, vendored, new file)
@@ -0,0 +1,335 @@
'use strict';

module.exports = exports;

const path = require('path');
const semver = require('semver');
const url = require('url');
const detect_libc = require('detect-libc');
const napi = require('./napi.js');

let abi_crosswalk;

// This is used for unit testing to provide a fake
// ABI crosswalk that emulates one that is not updated
// for the current version
if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) {
  abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK);
} else {
  abi_crosswalk = require('./abi_crosswalk.json');
}

const major_versions = {};
Object.keys(abi_crosswalk).forEach((v) => {
  const major = v.split('.')[0];
  if (!major_versions[major]) {
    major_versions[major] = v;
  }
});

function get_electron_abi(runtime, target_version) {
  if (!runtime) {
    throw new Error('get_electron_abi requires valid runtime arg');
  }
  if (typeof target_version === 'undefined') {
    // erroneous CLI call
    throw new Error('Empty target version is not supported if electron is the target.');
  }
  // Electron guarantees that patch version update won't break native modules.
  const sem_ver = semver.parse(target_version);
  return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor;
}
module.exports.get_electron_abi = get_electron_abi;

function get_node_webkit_abi(runtime, target_version) {
  if (!runtime) {
    throw new Error('get_node_webkit_abi requires valid runtime arg');
  }
  if (typeof target_version === 'undefined') {
    // erroneous CLI call
    throw new Error('Empty target version is not supported if node-webkit is the target.');
  }
  return runtime + '-v' + target_version;
}
module.exports.get_node_webkit_abi = get_node_webkit_abi;

function get_node_abi(runtime, versions) {
  if (!runtime) {
    throw new Error('get_node_abi requires valid runtime arg');
  }
  if (!versions) {
    throw new Error('get_node_abi requires valid process.versions object');
  }
  const sem_ver = semver.parse(versions.node);
  if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series
    // https://github.com/mapbox/node-pre-gyp/issues/124
    return runtime + '-v' + versions.node;
  } else {
    // process.versions.modules added in >= v0.10.4 and v0.11.7
    // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e
    return versions.modules ? runtime + '-v' + (+versions.modules) :
      'v8-' + versions.v8.split('.').slice(0, 2).join('.');
  }
}
module.exports.get_node_abi = get_node_abi;

function get_runtime_abi(runtime, target_version) {
  if (!runtime) {
    throw new Error('get_runtime_abi requires valid runtime arg');
  }
  if (runtime === 'node-webkit') {
    return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']);
  } else if (runtime === 'electron') {
    return get_electron_abi(runtime, target_version || process.versions.electron);
  } else {
    if (runtime !== 'node') {
      throw new Error("Unknown Runtime: '" + runtime + "'");
    }
    if (!target_version) {
      return get_node_abi(runtime, process.versions);
    } else {
      let cross_obj;
      // abi_crosswalk generated with ./scripts/abi_crosswalk.js
      if (abi_crosswalk[target_version]) {
        cross_obj = abi_crosswalk[target_version];
      } else {
        const target_parts = target_version.split('.').map((i) => { return +i; });
        if (target_parts.length !== 3) { // parse failed
          throw new Error('Unknown target version: ' + target_version);
        }
        /*
          The below code tries to infer the last known ABI compatible version
          that we have recorded in the abi_crosswalk.json when an exact match
          is not possible. The reasons for this to exist are complicated:

             - We support passing --target to be able to allow developers to package binaries for versions of node
               that are not the same one as they are running. This might also be used in combination with the
               --target_arch or --target_platform flags to also package binaries for alternative platforms
             - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node
               version that is running in memory
             - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look
               this info up for all versions
             - But we cannot easily predict what the future ABI will be for released versions
             - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly
               by being fully available at install time.
             - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release
               needs to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if
               you want the `--target` flag to keep working for the latest version
             - Which is impractical ^^
             - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding.

          In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that
          only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be
          ABI compatible with v0.10.33).

          TODO: use semver module instead of custom version parsing
        */
        const major = target_parts[0];
        let minor = target_parts[1];
        let patch = target_parts[2];
        // io.js: yeah if node.js ever releases 1.x this will break
        // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616
        if (major === 1) {
          // look for last release that is the same major version
          // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0
          while (true) {
            if (minor > 0) --minor;
            if (patch > 0) --patch;
            const new_iojs_target = '' + major + '.' + minor + '.' + patch;
            if (abi_crosswalk[new_iojs_target]) {
              cross_obj = abi_crosswalk[new_iojs_target];
              console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
              console.log('Warning: but node-pre-gyp successfully chose ' + new_iojs_target + ' as ABI compatible target');
              break;
            }
            if (minor === 0 && patch === 0) {
              break;
            }
          }
        } else if (major >= 2) {
          // look for last release that is the same major version
          if (major_versions[major]) {
            cross_obj = abi_crosswalk[major_versions[major]];
            console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
            console.log('Warning: but node-pre-gyp successfully chose ' + major_versions[major] + ' as ABI compatible target');
          }
        } else if (major === 0) { // node.js
          if (target_parts[1] % 2 === 0) { // for stable/even node.js series
            // look for the last release that is the same minor release
            // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0
            while (--patch > 0) {
              const new_node_target = '' + major + '.' + minor + '.' + patch;
              if (abi_crosswalk[new_node_target]) {
                cross_obj = abi_crosswalk[new_node_target];
                console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
                console.log('Warning: but node-pre-gyp successfully chose ' + new_node_target + ' as ABI compatible target');
                break;
              }
            }
          }
        }
      }
      if (!cross_obj) {
        throw new Error('Unsupported target version: ' + target_version);
      }
      // emulate process.versions
      const versions_obj = {
        node: target_version,
        v8: cross_obj.v8 + '.0',
        // abi_crosswalk uses 1 for node versions lacking process.versions.modules
        // process.versions.modules added in >= v0.10.4 and v0.11.7
        modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined
      };
      return get_node_abi(runtime, versions_obj);
    }
  }
}
module.exports.get_runtime_abi = get_runtime_abi;

const required_parameters = [
  'module_name',
  'module_path',
  'host'
];

function validate_config(package_json, opts) {
  const msg = package_json.name + ' package.json is not node-pre-gyp ready:\n';
  const missing = [];
  if (!package_json.main) {
    missing.push('main');
  }
  if (!package_json.version) {
    missing.push('version');
  }
  if (!package_json.name) {
    missing.push('name');
  }
  if (!package_json.binary) {
    missing.push('binary');
  }
  const o = package_json.binary;
  if (o) {
    required_parameters.forEach((p) => {
      if (!o[p] || typeof o[p] !== 'string') {
        missing.push('binary.' + p);
      }
    });
  }

  if (missing.length >= 1) {
    throw new Error(msg + 'package.json must declare these properties: \n' + missing.join('\n'));
  }
  if (o) {
    // enforce https over http
    const protocol = url.parse(o.host).protocol;
    if (protocol === 'http:') {
      throw new Error("'host' protocol (" + protocol + ") is invalid - only 'https:' is accepted");
    }
  }
  napi.validate_package_json(package_json, opts);
}

module.exports.validate_config = validate_config;

function eval_template(template, opts) {
  Object.keys(opts).forEach((key) => {
    const pattern = '{' + key + '}';
    while (template.indexOf(pattern) > -1) {
      template = template.replace(pattern, opts[key]);
    }
  });
  return template;
}

// url.resolve needs single trailing slash
// to behave correctly, otherwise a double slash
// may end up in the url which breaks requests
// and a lacking slash may not lead to proper joining
function fix_slashes(pathname) {
  if (pathname.slice(-1) !== '/') {
    return pathname + '/';
  }
  return pathname;
}

// remove double slashes
// note: path.normalize will not work because
// it will convert forward to back slashes
function drop_double_slashes(pathname) {
  return pathname.replace(/\/\//g, '/');
}

function get_process_runtime(versions) {
  let runtime = 'node';
  if (versions['node-webkit']) {
    runtime = 'node-webkit';
  } else if (versions.electron) {
    runtime = 'electron';
  }
  return runtime;
}

module.exports.get_process_runtime = get_process_runtime;

const default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
const default_remote_path = '';

module.exports.evaluate = function(package_json, options, napi_build_version) {
  options = options || {};
  validate_config(package_json, options); // options is a suitable substitute for opts in this case
  const v = package_json.version;
  const module_version = semver.parse(v);
  const runtime = options.runtime || get_process_runtime(process.versions);
  const opts = {
    name: package_json.name,
    configuration: options.debug ? 'Debug' : 'Release',
    debug: options.debug,
    module_name: package_json.binary.module_name,
    version: module_version.version,
    prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '',
    build: module_version.build.length ? module_version.build.join('.') : '',
    major: module_version.major,
    minor: module_version.minor,
    patch: module_version.patch,
    runtime: runtime,
    node_abi: get_runtime_abi(runtime, options.target),
    node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime, options.target),
    napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported
    napi_build_version: napi_build_version || '',
    node_napi_label: napi_build_version ? 'napi-v' + napi_build_version : get_runtime_abi(runtime, options.target),
    target: options.target || '',
    platform: options.target_platform || process.platform,
    target_platform: options.target_platform || process.platform,
    arch: options.target_arch || process.arch,
    target_arch: options.target_arch || process.arch,
    libc: options.target_libc || detect_libc.familySync() || 'unknown',
    module_main: package_json.main,
    toolset: options.toolset || '', // address https://github.com/mapbox/node-pre-gyp/issues/119
    bucket: package_json.binary.bucket,
    region: package_json.binary.region,
    s3ForcePathStyle: package_json.binary.s3ForcePathStyle || false
  };
  // support host mirror with npm config `--{module_name}_binary_host_mirror`
  // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25
  // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
  const validModuleName = opts.module_name.replace('-', '_');
  const host = process.env['npm_config_' + validModuleName + '_binary_host_mirror'] || package_json.binary.host;
  opts.host = fix_slashes(eval_template(host, opts));
  opts.module_path = eval_template(package_json.binary.module_path, opts);
  // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably
  if (options.module_root) {
    // resolve relative to known module root: works for pre-binding require
    opts.module_path = path.join(options.module_root, opts.module_path);
  } else {
    // resolve relative to current working directory: works for node-pre-gyp commands
    opts.module_path = path.resolve(opts.module_path);
  }
  opts.module = path.join(opts.module_path, opts.module_name + '.node');
  opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path, opts))) : default_remote_path;
  const package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
  opts.package_name = eval_template(package_name, opts);
  opts.staged_tarball = path.join('build/stage', opts.remote_path, opts.package_name);
  opts.hosted_path = url.resolve(opts.host, opts.remote_path);
  opts.hosted_tarball = url.resolve(opts.hosted_path, opts.package_name);
  return opts;
};
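For orientation, a worked example of the substitution that eval_template() and default_package_name above perform. eval_template() is not exported, so the sketch mirrors its replace-every-{key} logic inline; the module name, version, and ABI values are assumptions for illustration.

// Sketch: expanding the default package name template for an assumed module.
const template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
const opts = { module_name: 'my_native_module', version: '1.0.0', node_abi: 'node-v108', platform: 'linux', arch: 'x64' };

const package_name = Object.keys(opts).reduce((acc, key) => {
  // same effect as the while-loop replace in eval_template() above
  return acc.split('{' + key + '}').join(String(opts[key]));
}, template);

console.log(package_name);
// => my_native_module-v1.0.0-node-v108-linux-x64.tar.gz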
1  node_modules/@mapbox/node-pre-gyp/node_modules/.bin/nopt  (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../nopt/bin/nopt.js
58  node_modules/@mapbox/node-pre-gyp/node_modules/nopt/CHANGELOG.md  (generated, vendored, new file)
@@ -0,0 +1,58 @@
### v4.0.1 (2016-12-14)

#### WHOOPS

* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6)
  Merged so many patches at once that the code fencing
  ([@adius](https://github.com/adius)) added got broken. Sorry,
  ([@adius](https://github.com/adius))!
  ([@othiym23](https://github.com/othiym23))

### v4.0.0 (2016-12-13)

#### BREAKING CHANGES

* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa)
  When parsing String-typed arguments, if the next value is `""`, don't simply
  swallow it. ([@samjonester](https://github.com/samjonester))

#### PERFORMANCE TWEAKS

* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9)
  Simplify initialization. ([@elidoran](https://github.com/elidoran))
* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09)
  Store `Array.isArray(types[arg])` for reuse.
  ([@elidoran](https://github.com/elidoran))
* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e)
  Interpret single-item type arrays as a single type.
  ([@samjonester](https://github.com/samjonester))
* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925)
  Simplify key-value extraction. ([@elidoran](https://github.com/elidoran))
* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254)
  Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran))
* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd)
  Use `osenv.home()` to find a user's home directory instead of assuming it's
  always `$HOME`. ([@othiym23](https://github.com/othiym23))

#### TEST & CI IMPROVEMENTS

* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619)
  Fix `/tmp` test to work on Windows.
  ([@elidoran](https://github.com/elidoran))
* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b)
  Only run Windows tests on Windows, only run Unix tests on a Unix.
  ([@elidoran](https://github.com/elidoran))
* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366)
  Refresh Travis to run the tests against the currently-supported batch of npm
  versions. ([@helio](https://github.com/helio)-frota)
* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d)
  `tap@8.0.1` ([@othiym23](https://github.com/othiym23))

#### DOC TWEAKS

* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581)
  Use JavaScript code block for syntax highlighting.
  ([@adius](https://github.com/adius))
* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682)
  The code sample in the README had `many2: [ oneThing ]`, and now it has
  `many2: [ two, things ]`. ([@silkentrance](https://github.com/silkentrance))
15  node_modules/@mapbox/node-pre-gyp/node_modules/nopt/LICENSE  (generated, vendored, new file)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
213
node_modules/@mapbox/node-pre-gyp/node_modules/nopt/README.md
generated
vendored
Normal file
213
node_modules/@mapbox/node-pre-gyp/node_modules/nopt/README.md
generated
vendored
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
If you want to write an option parser, and have it be good, there are
|
||||||
|
two ways to do it. The Right Way, and the Wrong Way.
|
||||||
|
|
||||||
|
The Wrong Way is to sit down and write an option parser. We've all done
|
||||||
|
that.
|
||||||
|
|
||||||
|
The Right Way is to write some complex configurable program with so many
|
||||||
|
options that you hit the limit of your frustration just trying to
|
||||||
|
manage them all, and defer it with duct-tape solutions until you see
|
||||||
|
exactly to the core of the problem, and finally snap and write an
|
||||||
|
awesome option parser.
|
||||||
|
|
||||||
|
If you want to write an option parser, don't write an option parser.
|
||||||
|
Write a package manager, or a source control system, or a service
|
||||||
|
restarter, or an operating system. You probably won't end up with a
|
||||||
|
good one of those, but if you don't give up, and you are relentless and
|
||||||
|
diligent enough in your procrastination, you may just end up with a very
|
||||||
|
nice option parser.
|
||||||
|
|
||||||
|
## USAGE
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// my-program.js
|
||||||
|
var nopt = require("nopt")
|
||||||
|
, Stream = require("stream").Stream
|
||||||
|
, path = require("path")
|
||||||
|
, knownOpts = { "foo" : [String, null]
|
||||||
|
, "bar" : [Stream, Number]
|
||||||
|
, "baz" : path
|
||||||
|
, "bloo" : [ "big", "medium", "small" ]
|
||||||
|
, "flag" : Boolean
|
||||||
|
, "pick" : Boolean
|
||||||
|
, "many1" : [String, Array]
|
||||||
|
, "many2" : [path, Array]
|
||||||
|
}
|
||||||
|
, shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
|
||||||
|
, "b7" : ["--bar", "7"]
|
||||||
|
, "m" : ["--bloo", "medium"]
|
||||||
|
, "p" : ["--pick"]
|
||||||
|
, "f" : ["--flag"]
|
||||||
|
}
|
||||||
|
// everything is optional.
|
||||||
|
// knownOpts and shorthands default to {}
|
||||||
|
// arg list defaults to process.argv
|
||||||
|
// slice defaults to 2
|
||||||
|
, parsed = nopt(knownOpts, shortHands, process.argv, 2)
|
||||||
|
console.log(parsed)
|
||||||
|
```
|
||||||
|
|
||||||
|
This would give you support for any of the following:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ node my-program.js --foo "blerp" --no-flag
|
||||||
|
{ "foo" : "blerp", "flag" : false }
|
||||||
|
|
||||||
|
$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
|
||||||
|
{ bar: 7, foo: "Mr. Hand", flag: true }
|
||||||
|
|
||||||
|
$ node my-program.js --foo "blerp" -f -----p
|
||||||
|
{ foo: "blerp", flag: true, pick: true }
|
||||||
|
|
||||||
|
$ node my-program.js -fp --foofoo
|
||||||
|
{ foo: "Mr. Foo", flag: true, pick: true }
|
||||||
|
|
||||||
|
$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
|
||||||
|
{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
|
||||||
|
|
||||||
|
$ node my-program.js --blatzk -fp # unknown opts are ok.
|
||||||
|
{ blatzk: true, flag: true, pick: true }
|
||||||
|
|
||||||
|
$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value
|
||||||
|
{ blatzk: 1000, flag: true, pick: true }
|
||||||
|
|
||||||
|
$ node my-program.js --no-blatzk -fp # unless they start with "no-"
|
||||||
|
{ blatzk: false, flag: true, pick: true }
|
||||||
|
|
||||||
|
$ node my-program.js --baz b/a/z # known paths are resolved.
|
||||||
|
{ baz: "/Users/isaacs/b/a/z" }
|
||||||
|
|
||||||
|
# if Array is one of the types, then it can take many
|
||||||
|
# values, and will always be an array. The other types provided
|
||||||
|
# specify what types are allowed in the list.
|
||||||
|
|
||||||
|
$ node my-program.js --many1 5 --many1 null --many1 foo
|
||||||
|
{ many1: ["5", "null", "foo"] }
|
||||||
|
|
||||||
|
$ node my-program.js --many2 foo --many2 bar
|
||||||
|
{ many2: ["/path/to/foo", "path/to/bar"] }
|
||||||
|
```
|
||||||
|
|
||||||
|
Read the tests at the bottom of `lib/nopt.js` for more examples of
|
||||||
|
what this puppy can do.
|
||||||
|
|
||||||
|
## Types
|
||||||
|
|
||||||
|
The following types are supported, and defined on `nopt.typeDefs`
|
||||||
|
|
||||||
|
* String: A normal string. No parsing is done.
|
||||||
|
* path: A file system path. Gets resolved against cwd if not absolute.
|
||||||
|
* url: A url. If it doesn't parse, it isn't accepted.
|
||||||
|
* Number: Must be numeric.
|
||||||
|
* Date: Must parse as a date. If it does, and `Date` is one of the options,
|
||||||
|
then it will return a Date object, not a string.
|
||||||
|
* Boolean: Must be either `true` or `false`. If an option is a boolean,
|
||||||
|
then it does not need a value, and its presence will imply `true` as
|
||||||
|
the value. To negate boolean flags, do `--no-whatever` or `--whatever
|
||||||
|
false`
|
||||||
|
* NaN: Means that the option is strictly not allowed. Any value will
|
||||||
|
fail.
|
||||||
|
* Stream: An object matching the "Stream" class in node. Valuable
|
||||||
|
for use when validating programmatically. (npm uses this to let you
|
||||||
|
supply any WriteStream on the `outfd` and `logfd` config options.)
|
||||||
|
* Array: If `Array` is specified as one of the types, then the value
|
||||||
|
will be parsed as a list of options. This means that multiple values
|
||||||
|
can be specified, and that the value will always be an array.
|
||||||
|
|
||||||
|
If a type is an array of values not on this list, then those are
|
||||||
|
considered valid values. For instance, in the example above, the
|
||||||
|
`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
|
||||||
|
and any other value will be rejected.
|
||||||
|
|
||||||
|
When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
|
||||||
|
interpreted as their JavaScript equivalents.
|
||||||
|
|
||||||
|
You can also mix types and values, or multiple types, in a list. For
|
||||||
|
instance `{ blah: [Number, null] }` would allow a value to be set to
|
||||||
|
either a Number or null. When types are ordered, this implies a
|
||||||
|
preference, and the first type that can be used to properly interpret
|
||||||
|
the value will be used.
|
||||||
|
|
||||||
|
To define a new type, add it to `nopt.typeDefs`. Each item in that
|
||||||
|
hash is an object with a `type` member and a `validate` method. The
|
||||||
|
`type` member is an object that matches what goes in the type list. The
|
||||||
|
`validate` method is a function that gets called with `validate(data,
|
||||||
|
key, val)`. Validate methods should assign `data[key]` to the valid
|
||||||
|
value of `val` if it can be handled properly, or return boolean
|
||||||
|
`false` if it cannot.
|
||||||
|
|
||||||
|
You can also call `nopt.clean(data, types, typeDefs)` to clean up a
|
||||||
|
config object and remove its invalid properties.
|
||||||
|
|
||||||
|
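A quick sketch of how a custom type plugs into this contract (the `positiveInt`
name, the `retries` option, and the sample values are made up for illustration;
`nopt` is assumed to be resolvable via `require`):

```javascript
var nopt = require("nopt")

// Sentinel object used both in the option declaration and in the typeDef.
var PositiveInt = {}

nopt.typeDefs.positiveInt = {
  type: PositiveInt,
  validate: function (data, key, val) {
    var n = parseInt(val, 10)
    if (isNaN(n) || n <= 0) return false // invalid: nopt drops the key
    data[key] = n                        // valid: assign the parsed value
  }
}

var types = { retries: PositiveInt }
console.log(nopt(types, {}, ["--retries", "3"], 0)) // { retries: 3, argv: { ... } }

// nopt.clean() runs the same validation over a plain config object:
var config = { retries: "-1" }
nopt.clean(config, types)
console.log(config) // the invalid value has been removed: {}
```
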
## Error Handling

By default, nopt outputs a warning to standard error when invalid values for
known options are found. You can change this behavior by assigning a method
to `nopt.invalidHandler`. This method will be called as
`nopt.invalidHandler(key, val, types)` with the offending key and value.

If no `nopt.invalidHandler` is assigned, then it will console.error
its whining. If it is assigned to boolean `false` then the warning is
suppressed.

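A small sketch of both behaviours (the option name and the message wording are
just for illustration):

```javascript
var nopt = require("nopt")

// Called for each invalid value found for a known option:
nopt.invalidHandler = function (key, val, types) {
  console.error("ignoring --" + key + "=" + val)
}

nopt({ port: Number }, {}, ["--port", "not-a-number"], 0)
// logs: ignoring --port=not-a-number

// Or suppress the warnings entirely:
nopt.invalidHandler = false
```
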
## Abbreviations

Yes, they are supported. If you define options like this:

```javascript
{ "foolhardyelephants" : Boolean
, "pileofmonkeys" : Boolean }
```

Then this will work:

```bash
node program.js --foolhar --pil
node program.js --no-f --pileofmon
# etc.
```

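The same expansion is easy to see programmatically; a short sketch using the
two option names from the snippet above:

```javascript
var nopt = require("nopt")

var types = { foolhardyelephants: Boolean, pileofmonkeys: Boolean }

// Unambiguous prefixes expand to the full option names:
console.log(nopt(types, {}, ["--foolhar", "--pil"], 0))
// { foolhardyelephants: true, pileofmonkeys: true, argv: { ... } }

// "no-" negation works on an abbreviation too:
console.log(nopt(types, {}, ["--no-f", "--pileofmon"], 0))
// { foolhardyelephants: false, pileofmonkeys: true, argv: { ... } }
```
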
## Shorthands

Shorthands are a hash of shorter option names to a snippet of args that
they expand to.

If multiple one-character shorthands are all combined, and the
combination does not unambiguously match any other option or shorthand,
then they will be broken up into their constituent parts. For example:

```json
{ "s" : ["--loglevel", "silent"]
, "g" : "--global"
, "f" : "--force"
, "p" : "--parseable"
, "l" : "--long"
}
```

```bash
npm ls -sgflp
# just like doing this:
npm ls --loglevel silent --global --force --long --parseable
```

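In code, the same expansion looks roughly like this (a sketch reusing the
npm-style names from the snippet above; the types assigned to them here are
assumed for illustration):

```javascript
var nopt = require("nopt")

var types = { loglevel: String, global: Boolean, force: Boolean,
              parseable: Boolean, long: Boolean }
var shorthands = { s: ["--loglevel", "silent"], g: "--global", f: "--force",
                   p: "--parseable", l: "--long" }

var parsed = nopt(types, shorthands, ["-sgflp"], 0)
console.log(parsed.argv.cooked)
// [ '--loglevel', 'silent', '--global', '--force', '--long', '--parseable' ]
console.log(parsed.loglevel) // 'silent'
```
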
## The Rest of the args

The config object returned by nopt is given a special member called
`argv`, which is an object with the following fields:

* `remain`: The remaining args after all the parsing has occurred.
* `original`: The args as they originally appeared.
* `cooked`: The args after flags and shorthands are expanded.

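For example (a minimal sketch; the file name is made up):

```javascript
var nopt = require("nopt")

var parsed = nopt({ flag: Boolean }, { f: ["--flag"] },
                  ["-f", "--", "leftover.txt"], 0)

console.log(parsed.argv.original) // [ '-f', '--', 'leftover.txt' ]
console.log(parsed.argv.cooked)   // [ '--flag', '--', 'leftover.txt' ]
console.log(parsed.argv.remain)   // [ 'leftover.txt' ]
```
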
## Slicing

Node programs are called with more or less the exact argv as it appears
in C land, after the v8 and node-specific options have been plucked off.
As such, `argv[0]` is always `node` and `argv[1]` is always the
JavaScript program being run.

That's usually not very useful to you. So they're sliced off by
default. If you want them, then you can pass in `0` as the last
argument, or any other number that you'd like to slice off the start of
the list.

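In other words: keep the default slice of 2 when parsing `process.argv`, and
pass `0` when you build the argument list yourself; a short sketch:

```javascript
var nopt = require("nopt")

// Default slice of 2 drops argv[0] (node) and argv[1] (the script path):
var fromProcess = nopt({ flag: Boolean }, {}, process.argv)

// A hand-built list has nothing to slice off, so pass 0:
var fromArray = nopt({ flag: Boolean }, {}, ["--flag"], 0)
console.log(fromArray.flag) // true
```
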
54  node_modules/@mapbox/node-pre-gyp/node_modules/nopt/bin/nopt.js  (generated, vendored, executable file)
@@ -0,0 +1,54 @@
#!/usr/bin/env node
var nopt = require("../lib/nopt")
  , path = require("path")
  , types = { num: Number
            , bool: Boolean
            , help: Boolean
            , list: Array
            , "num-list": [Number, Array]
            , "str-list": [String, Array]
            , "bool-list": [Boolean, Array]
            , str: String
            , clear: Boolean
            , config: Boolean
            , length: Number
            , file: path
            }
  , shorthands = { s: [ "--str", "astring" ]
                 , b: [ "--bool" ]
                 , nb: [ "--no-bool" ]
                 , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
                 , "?": ["--help"]
                 , h: ["--help"]
                 , H: ["--help"]
                 , n: [ "--num", "125" ]
                 , c: ["--config"]
                 , l: ["--length"]
                 , f: ["--file"]
                 }
  , parsed = nopt( types
                 , shorthands
                 , process.argv
                 , 2 )

console.log("parsed", parsed)

if (parsed.help) {
  console.log("")
  console.log("nopt cli tester")
  console.log("")
  console.log("types")
  console.log(Object.keys(types).map(function M (t) {
    var type = types[t]
    if (Array.isArray(type)) {
      return [t, type.map(function (type) { return type.name })]
    }
    return [t, type && type.name]
  }).reduce(function (s, i) {
    s[i[0]] = i[1]
    return s
  }, {}))
  console.log("")
  console.log("shorthands")
  console.log(shorthands)
}

441  node_modules/@mapbox/node-pre-gyp/node_modules/nopt/lib/nopt.js  (generated, vendored, normal file)
@@ -0,0 +1,441 @@
|
|||||||
|
// info about each config option.
|
||||||
|
|
||||||
|
var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
|
||||||
|
? function () { console.error.apply(console, arguments) }
|
||||||
|
: function () {}
|
||||||
|
|
||||||
|
var url = require("url")
|
||||||
|
, path = require("path")
|
||||||
|
, Stream = require("stream").Stream
|
||||||
|
, abbrev = require("abbrev")
|
||||||
|
, os = require("os")
|
||||||
|
|
||||||
|
module.exports = exports = nopt
|
||||||
|
exports.clean = clean
|
||||||
|
|
||||||
|
exports.typeDefs =
|
||||||
|
{ String : { type: String, validate: validateString }
|
||||||
|
, Boolean : { type: Boolean, validate: validateBoolean }
|
||||||
|
, url : { type: url, validate: validateUrl }
|
||||||
|
, Number : { type: Number, validate: validateNumber }
|
||||||
|
, path : { type: path, validate: validatePath }
|
||||||
|
, Stream : { type: Stream, validate: validateStream }
|
||||||
|
, Date : { type: Date, validate: validateDate }
|
||||||
|
}
|
||||||
|
|
||||||
|
function nopt (types, shorthands, args, slice) {
|
||||||
|
args = args || process.argv
|
||||||
|
types = types || {}
|
||||||
|
shorthands = shorthands || {}
|
||||||
|
if (typeof slice !== "number") slice = 2
|
||||||
|
|
||||||
|
debug(types, shorthands, args, slice)
|
||||||
|
|
||||||
|
args = args.slice(slice)
|
||||||
|
var data = {}
|
||||||
|
, key
|
||||||
|
, argv = {
|
||||||
|
remain: [],
|
||||||
|
cooked: args,
|
||||||
|
original: args.slice(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
parse(args, data, argv.remain, types, shorthands)
|
||||||
|
// now data is full
|
||||||
|
clean(data, types, exports.typeDefs)
|
||||||
|
data.argv = argv
|
||||||
|
Object.defineProperty(data.argv, 'toString', { value: function () {
|
||||||
|
return this.original.map(JSON.stringify).join(" ")
|
||||||
|
}, enumerable: false })
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
function clean (data, types, typeDefs) {
|
||||||
|
typeDefs = typeDefs || exports.typeDefs
|
||||||
|
var remove = {}
|
||||||
|
, typeDefault = [false, true, null, String, Array]
|
||||||
|
|
||||||
|
Object.keys(data).forEach(function (k) {
|
||||||
|
if (k === "argv") return
|
||||||
|
var val = data[k]
|
||||||
|
, isArray = Array.isArray(val)
|
||||||
|
, type = types[k]
|
||||||
|
if (!isArray) val = [val]
|
||||||
|
if (!type) type = typeDefault
|
||||||
|
if (type === Array) type = typeDefault.concat(Array)
|
||||||
|
if (!Array.isArray(type)) type = [type]
|
||||||
|
|
||||||
|
debug("val=%j", val)
|
||||||
|
debug("types=", type)
|
||||||
|
val = val.map(function (val) {
|
||||||
|
// if it's an unknown value, then parse false/true/null/numbers/dates
|
||||||
|
if (typeof val === "string") {
|
||||||
|
debug("string %j", val)
|
||||||
|
val = val.trim()
|
||||||
|
if ((val === "null" && ~type.indexOf(null))
|
||||||
|
|| (val === "true" &&
|
||||||
|
(~type.indexOf(true) || ~type.indexOf(Boolean)))
|
||||||
|
|| (val === "false" &&
|
||||||
|
(~type.indexOf(false) || ~type.indexOf(Boolean)))) {
|
||||||
|
val = JSON.parse(val)
|
||||||
|
debug("jsonable %j", val)
|
||||||
|
} else if (~type.indexOf(Number) && !isNaN(val)) {
|
||||||
|
debug("convert to number", val)
|
||||||
|
val = +val
|
||||||
|
} else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
|
||||||
|
debug("convert to date", val)
|
||||||
|
val = new Date(val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!types.hasOwnProperty(k)) {
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
|
||||||
|
// allow `--no-blah` to set 'blah' to null if null is allowed
|
||||||
|
if (val === false && ~type.indexOf(null) &&
|
||||||
|
!(~type.indexOf(false) || ~type.indexOf(Boolean))) {
|
||||||
|
val = null
|
||||||
|
}
|
||||||
|
|
||||||
|
var d = {}
|
||||||
|
d[k] = val
|
||||||
|
debug("prevalidated val", d, val, types[k])
|
||||||
|
if (!validate(d, k, val, types[k], typeDefs)) {
|
||||||
|
if (exports.invalidHandler) {
|
||||||
|
exports.invalidHandler(k, val, types[k], data)
|
||||||
|
} else if (exports.invalidHandler !== false) {
|
||||||
|
debug("invalid: "+k+"="+val, types[k])
|
||||||
|
}
|
||||||
|
return remove
|
||||||
|
}
|
||||||
|
debug("validated val", d, val, types[k])
|
||||||
|
return d[k]
|
||||||
|
}).filter(function (val) { return val !== remove })
|
||||||
|
|
||||||
|
// if we allow Array specifically, then an empty array is how we
|
||||||
|
// express 'no value here', not null. Allow it.
|
||||||
|
if (!val.length && type.indexOf(Array) === -1) {
|
||||||
|
debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array))
|
||||||
|
delete data[k]
|
||||||
|
}
|
||||||
|
else if (isArray) {
|
||||||
|
debug(isArray, data[k], val)
|
||||||
|
data[k] = val
|
||||||
|
} else data[k] = val[0]
|
||||||
|
|
||||||
|
debug("k=%s val=%j", k, val, data[k])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateString (data, k, val) {
|
||||||
|
data[k] = String(val)
|
||||||
|
}
|
||||||
|
|
||||||
|
function validatePath (data, k, val) {
|
||||||
|
if (val === true) return false
|
||||||
|
if (val === null) return true
|
||||||
|
|
||||||
|
val = String(val)
|
||||||
|
|
||||||
|
var isWin = process.platform === 'win32'
|
||||||
|
, homePattern = isWin ? /^~(\/|\\)/ : /^~\//
|
||||||
|
, home = os.homedir()
|
||||||
|
|
||||||
|
if (home && val.match(homePattern)) {
|
||||||
|
data[k] = path.resolve(home, val.substr(2))
|
||||||
|
} else {
|
||||||
|
data[k] = path.resolve(val)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateNumber (data, k, val) {
|
||||||
|
debug("validate Number %j %j %j", k, val, isNaN(val))
|
||||||
|
if (isNaN(val)) return false
|
||||||
|
data[k] = +val
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateDate (data, k, val) {
|
||||||
|
var s = Date.parse(val)
|
||||||
|
debug("validate Date %j %j %j", k, val, s)
|
||||||
|
if (isNaN(s)) return false
|
||||||
|
data[k] = new Date(val)
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateBoolean (data, k, val) {
|
||||||
|
if (val instanceof Boolean) val = val.valueOf()
|
||||||
|
else if (typeof val === "string") {
|
||||||
|
if (!isNaN(val)) val = !!(+val)
|
||||||
|
else if (val === "null" || val === "false") val = false
|
||||||
|
else val = true
|
||||||
|
} else val = !!val
|
||||||
|
data[k] = val
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateUrl (data, k, val) {
|
||||||
|
val = url.parse(String(val))
|
||||||
|
if (!val.host) return false
|
||||||
|
data[k] = val.href
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateStream (data, k, val) {
|
||||||
|
if (!(val instanceof Stream)) return false
|
||||||
|
data[k] = val
|
||||||
|
}
|
||||||
|
|
||||||
|
function validate (data, k, val, type, typeDefs) {
|
||||||
|
// arrays are lists of types.
|
||||||
|
if (Array.isArray(type)) {
|
||||||
|
for (var i = 0, l = type.length; i < l; i ++) {
|
||||||
|
if (type[i] === Array) continue
|
||||||
|
if (validate(data, k, val, type[i], typeDefs)) return true
|
||||||
|
}
|
||||||
|
delete data[k]
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// an array of anything?
|
||||||
|
if (type === Array) return true
|
||||||
|
|
||||||
|
// NaN is poisonous. Means that something is not allowed.
|
||||||
|
if (type !== type) {
|
||||||
|
debug("Poison NaN", k, val, type)
|
||||||
|
delete data[k]
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// explicit list of values
|
||||||
|
if (val === type) {
|
||||||
|
debug("Explicitly allowed %j", val)
|
||||||
|
// if (isArray) (data[k] = data[k] || []).push(val)
|
||||||
|
// else data[k] = val
|
||||||
|
data[k] = val
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// now go through the list of typeDefs, validate against each one.
|
||||||
|
var ok = false
|
||||||
|
, types = Object.keys(typeDefs)
|
||||||
|
for (var i = 0, l = types.length; i < l; i ++) {
|
||||||
|
debug("test type %j %j %j", k, val, types[i])
|
||||||
|
var t = typeDefs[types[i]]
|
||||||
|
if (t &&
|
||||||
|
((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) {
|
||||||
|
var d = {}
|
||||||
|
ok = false !== t.validate(d, k, val)
|
||||||
|
val = d[k]
|
||||||
|
if (ok) {
|
||||||
|
// if (isArray) (data[k] = data[k] || []).push(val)
|
||||||
|
// else data[k] = val
|
||||||
|
data[k] = val
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
debug("OK? %j (%j %j %j)", ok, k, val, types[i])
|
||||||
|
|
||||||
|
if (!ok) delete data[k]
|
||||||
|
return ok
|
||||||
|
}
|
||||||
|
|
||||||
|
function parse (args, data, remain, types, shorthands) {
|
||||||
|
debug("parse", args, data, remain)
|
||||||
|
|
||||||
|
var key = null
|
||||||
|
, abbrevs = abbrev(Object.keys(types))
|
||||||
|
, shortAbbr = abbrev(Object.keys(shorthands))
|
||||||
|
|
||||||
|
for (var i = 0; i < args.length; i ++) {
|
||||||
|
var arg = args[i]
|
||||||
|
debug("arg", arg)
|
||||||
|
|
||||||
|
if (arg.match(/^-{2,}$/)) {
|
||||||
|
// done with keys.
|
||||||
|
// the rest are args.
|
||||||
|
remain.push.apply(remain, args.slice(i + 1))
|
||||||
|
args[i] = "--"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
var hadEq = false
|
||||||
|
if (arg.charAt(0) === "-" && arg.length > 1) {
|
||||||
|
var at = arg.indexOf('=')
|
||||||
|
if (at > -1) {
|
||||||
|
hadEq = true
|
||||||
|
var v = arg.substr(at + 1)
|
||||||
|
arg = arg.substr(0, at)
|
||||||
|
args.splice(i, 1, arg, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// see if it's a shorthand
|
||||||
|
// if so, splice and back up to re-parse it.
|
||||||
|
var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
|
||||||
|
debug("arg=%j shRes=%j", arg, shRes)
|
||||||
|
if (shRes) {
|
||||||
|
debug(arg, shRes)
|
||||||
|
args.splice.apply(args, [i, 1].concat(shRes))
|
||||||
|
if (arg !== shRes[0]) {
|
||||||
|
i --
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
arg = arg.replace(/^-+/, "")
|
||||||
|
var no = null
|
||||||
|
while (arg.toLowerCase().indexOf("no-") === 0) {
|
||||||
|
no = !no
|
||||||
|
arg = arg.substr(3)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (abbrevs[arg]) arg = abbrevs[arg]
|
||||||
|
|
||||||
|
var argType = types[arg]
|
||||||
|
var isTypeArray = Array.isArray(argType)
|
||||||
|
if (isTypeArray && argType.length === 1) {
|
||||||
|
isTypeArray = false
|
||||||
|
argType = argType[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
var isArray = argType === Array ||
|
||||||
|
isTypeArray && argType.indexOf(Array) !== -1
|
||||||
|
|
||||||
|
// allow unknown things to be arrays if specified multiple times.
|
||||||
|
if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) {
|
||||||
|
if (!Array.isArray(data[arg]))
|
||||||
|
data[arg] = [data[arg]]
|
||||||
|
isArray = true
|
||||||
|
}
|
||||||
|
|
||||||
|
var val
|
||||||
|
, la = args[i + 1]
|
||||||
|
|
||||||
|
var isBool = typeof no === 'boolean' ||
|
||||||
|
argType === Boolean ||
|
||||||
|
isTypeArray && argType.indexOf(Boolean) !== -1 ||
|
||||||
|
(typeof argType === 'undefined' && !hadEq) ||
|
||||||
|
(la === "false" &&
|
||||||
|
(argType === null ||
|
||||||
|
isTypeArray && ~argType.indexOf(null)))
|
||||||
|
|
||||||
|
if (isBool) {
|
||||||
|
// just set and move along
|
||||||
|
val = !no
|
||||||
|
// however, also support --bool true or --bool false
|
||||||
|
if (la === "true" || la === "false") {
|
||||||
|
val = JSON.parse(la)
|
||||||
|
la = null
|
||||||
|
if (no) val = !val
|
||||||
|
i ++
|
||||||
|
}
|
||||||
|
|
||||||
|
// also support "foo":[Boolean, "bar"] and "--foo bar"
|
||||||
|
if (isTypeArray && la) {
|
||||||
|
if (~argType.indexOf(la)) {
|
||||||
|
// an explicit type
|
||||||
|
val = la
|
||||||
|
i ++
|
||||||
|
} else if ( la === "null" && ~argType.indexOf(null) ) {
|
||||||
|
// null allowed
|
||||||
|
val = null
|
||||||
|
i ++
|
||||||
|
} else if ( !la.match(/^-{2,}[^-]/) &&
|
||||||
|
!isNaN(la) &&
|
||||||
|
~argType.indexOf(Number) ) {
|
||||||
|
// number
|
||||||
|
val = +la
|
||||||
|
i ++
|
||||||
|
} else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) {
|
||||||
|
// string
|
||||||
|
val = la
|
||||||
|
i ++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isArray) (data[arg] = data[arg] || []).push(val)
|
||||||
|
else data[arg] = val
|
||||||
|
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (argType === String) {
|
||||||
|
if (la === undefined) {
|
||||||
|
la = ""
|
||||||
|
} else if (la.match(/^-{1,2}[^-]+/)) {
|
||||||
|
la = ""
|
||||||
|
i --
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (la && la.match(/^-{2,}$/)) {
|
||||||
|
la = undefined
|
||||||
|
i --
|
||||||
|
}
|
||||||
|
|
||||||
|
val = la === undefined ? true : la
|
||||||
|
if (isArray) (data[arg] = data[arg] || []).push(val)
|
||||||
|
else data[arg] = val
|
||||||
|
|
||||||
|
i ++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
remain.push(arg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
|
||||||
|
// handle single-char shorthands glommed together, like
|
||||||
|
// npm ls -glp, but only if there is one dash, and only if
|
||||||
|
// all of the chars are single-char shorthands, and it's
|
||||||
|
// not a match to some other abbrev.
|
||||||
|
arg = arg.replace(/^-+/, '')
|
||||||
|
|
||||||
|
// if it's an exact known option, then don't go any further
|
||||||
|
if (abbrevs[arg] === arg)
|
||||||
|
return null
|
||||||
|
|
||||||
|
// if it's an exact known shortopt, same deal
|
||||||
|
if (shorthands[arg]) {
|
||||||
|
// make it an array, if it's a list of words
|
||||||
|
if (shorthands[arg] && !Array.isArray(shorthands[arg]))
|
||||||
|
shorthands[arg] = shorthands[arg].split(/\s+/)
|
||||||
|
|
||||||
|
return shorthands[arg]
|
||||||
|
}
|
||||||
|
|
||||||
|
// first check to see if this arg is a set of single-char shorthands
|
||||||
|
var singles = shorthands.___singles
|
||||||
|
if (!singles) {
|
||||||
|
singles = Object.keys(shorthands).filter(function (s) {
|
||||||
|
return s.length === 1
|
||||||
|
}).reduce(function (l,r) {
|
||||||
|
l[r] = true
|
||||||
|
return l
|
||||||
|
}, {})
|
||||||
|
shorthands.___singles = singles
|
||||||
|
debug('shorthand singles', singles)
|
||||||
|
}
|
||||||
|
|
||||||
|
var chrs = arg.split("").filter(function (c) {
|
||||||
|
return singles[c]
|
||||||
|
})
|
||||||
|
|
||||||
|
if (chrs.join("") === arg) return chrs.map(function (c) {
|
||||||
|
return shorthands[c]
|
||||||
|
}).reduce(function (l, r) {
|
||||||
|
return l.concat(r)
|
||||||
|
}, [])
|
||||||
|
|
||||||
|
|
||||||
|
// if it's an arg abbrev, and not a literal shorthand, then prefer the arg
|
||||||
|
if (abbrevs[arg] && !shorthands[arg])
|
||||||
|
return null
|
||||||
|
|
||||||
|
// if it's an abbr for a shorthand, then use that
|
||||||
|
if (shortAbbr[arg])
|
||||||
|
arg = shortAbbr[arg]
|
||||||
|
|
||||||
|
// make it an array, if it's a list of words
|
||||||
|
if (shorthands[arg] && !Array.isArray(shorthands[arg]))
|
||||||
|
shorthands[arg] = shorthands[arg].split(/\s+/)
|
||||||
|
|
||||||
|
return shorthands[arg]
|
||||||
|
}
|
||||||
34  node_modules/@mapbox/node-pre-gyp/node_modules/nopt/package.json  (generated, vendored, normal file)
@@ -0,0 +1,34 @@
{
  "name": "nopt",
  "version": "5.0.0",
  "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "main": "lib/nopt.js",
  "scripts": {
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags",
    "test": "tap test/*.js"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/nopt.git"
  },
  "bin": {
    "nopt": "bin/nopt.js"
  },
  "license": "ISC",
  "dependencies": {
    "abbrev": "1"
  },
  "devDependencies": {
    "tap": "^14.10.6"
  },
  "files": [
    "bin",
    "lib"
  ],
  "engines": {
    "node": ">=6"
  }
}

62  node_modules/@mapbox/node-pre-gyp/package.json  (generated, vendored, normal file)
@@ -0,0 +1,62 @@
|
|||||||
|
{
|
||||||
|
"name": "@mapbox/node-pre-gyp",
|
||||||
|
"description": "Node.js native addon binary install tool",
|
||||||
|
"version": "1.0.11",
|
||||||
|
"keywords": [
|
||||||
|
"native",
|
||||||
|
"addon",
|
||||||
|
"module",
|
||||||
|
"c",
|
||||||
|
"c++",
|
||||||
|
"bindings",
|
||||||
|
"binary"
|
||||||
|
],
|
||||||
|
"license": "BSD-3-Clause",
|
||||||
|
"author": "Dane Springmeyer <dane@mapbox.com>",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/mapbox/node-pre-gyp.git"
|
||||||
|
},
|
||||||
|
"bin": "./bin/node-pre-gyp",
|
||||||
|
"main": "./lib/node-pre-gyp.js",
|
||||||
|
"dependencies": {
|
||||||
|
"detect-libc": "^2.0.0",
|
||||||
|
"https-proxy-agent": "^5.0.0",
|
||||||
|
"make-dir": "^3.1.0",
|
||||||
|
"node-fetch": "^2.6.7",
|
||||||
|
"nopt": "^5.0.0",
|
||||||
|
"npmlog": "^5.0.1",
|
||||||
|
"rimraf": "^3.0.2",
|
||||||
|
"semver": "^7.3.5",
|
||||||
|
"tar": "^6.1.11"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@mapbox/cloudfriend": "^5.1.0",
|
||||||
|
"@mapbox/eslint-config-mapbox": "^3.0.0",
|
||||||
|
"aws-sdk": "^2.1087.0",
|
||||||
|
"codecov": "^3.8.3",
|
||||||
|
"eslint": "^7.32.0",
|
||||||
|
"eslint-plugin-node": "^11.1.0",
|
||||||
|
"mock-aws-s3": "^4.0.2",
|
||||||
|
"nock": "^12.0.3",
|
||||||
|
"node-addon-api": "^4.3.0",
|
||||||
|
"nyc": "^15.1.0",
|
||||||
|
"tape": "^5.5.2",
|
||||||
|
"tar-fs": "^2.1.1"
|
||||||
|
},
|
||||||
|
"nyc": {
|
||||||
|
"all": true,
|
||||||
|
"skip-full": false,
|
||||||
|
"exclude": [
|
||||||
|
"test/**"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"coverage": "nyc --all --include index.js --include lib/ npm test",
|
||||||
|
"upload-coverage": "nyc report --reporter json && codecov --clear --flags=unit --file=./coverage/coverage-final.json",
|
||||||
|
"lint": "eslint bin/node-pre-gyp lib/*js lib/util/*js test/*js scripts/*js",
|
||||||
|
"fix": "npm run lint -- --fix",
|
||||||
|
"update-crosswalk": "node scripts/abi_crosswalk.js",
|
||||||
|
"test": "tape test/*test.js"
|
||||||
|
}
|
||||||
|
}
|
||||||
20  node_modules/@popperjs/core/LICENSE.md  (generated, vendored, normal file)
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2019 Federico Zivolo

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

376  node_modules/@popperjs/core/README.md  (generated, vendored, normal file)
@@ -0,0 +1,376 @@
|
|||||||
|
<!-- <HEADER> // IGNORE IT -->
|
||||||
|
<p align="center">
|
||||||
|
<img src="https://rawcdn.githack.com/popperjs/popper-core/8805a5d7599e14619c9e7ac19a3713285d8e5d7f/docs/src/images/popper-logo-outlined.svg" alt="Popper" height="300px"/>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
<h1>Tooltip & Popover Positioning Engine</h1>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://www.npmjs.com/package/@popperjs/core">
|
||||||
|
<img src="https://img.shields.io/npm/v/@popperjs/core?style=for-the-badge" alt="npm version" />
|
||||||
|
</a>
|
||||||
|
<a href="https://www.npmjs.com/package/@popperjs/core">
|
||||||
|
<img src="https://img.shields.io/endpoint?style=for-the-badge&url=https://runkit.io/fezvrasta/combined-npm-downloads/1.0.0?packages=popper.js,@popperjs/core" alt="npm downloads per month (popper.js + @popperjs/core)" />
|
||||||
|
</a>
|
||||||
|
<a href="https://rollingversions.com/popperjs/popper-core">
|
||||||
|
<img src="https://img.shields.io/badge/Rolling%20Versions-Enabled-brightgreen?style=for-the-badge" alt="Rolling Versions" />
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<br />
|
||||||
|
<!-- </HEADER> // NOW BEGINS THE README -->
|
||||||
|
|
||||||
|
**Positioning tooltips and popovers is difficult. Popper is here to help!**
|
||||||
|
|
||||||
|
Given an element, such as a button, and a tooltip element describing it, Popper
|
||||||
|
will automatically put the tooltip in the right place near the button.
|
||||||
|
|
||||||
|
It will position _any_ UI element that "pops out" from the flow of your document
|
||||||
|
and floats near a target element. The most common example is a tooltip, but it
|
||||||
|
also includes popovers, drop-downs, and more. All of these can be generically
|
||||||
|
described as a "popper" element.
|
||||||
|
|
||||||
|
## Demo
|
||||||
|
|
||||||
|
[](https://popper.js.org)
|
||||||
|
|
||||||
|
## Docs
|
||||||
|
|
||||||
|
- [v2.x (latest)](https://popper.js.org/docs/v2/)
|
||||||
|
- [v1.x](https://popper.js.org/docs/v1/)
|
||||||
|
|
||||||
|
We've created a
|
||||||
|
[Migration Guide](https://popper.js.org/docs/v2/migration-guide/) to help you
|
||||||
|
migrate from Popper 1 to Popper 2.
|
||||||
|
|
||||||
|
To contribute to the Popper website and documentation, please visit the
|
||||||
|
[dedicated repository](https://github.com/popperjs/website).
|
||||||
|
|
||||||
|
## Why not use pure CSS?
|
||||||
|
|
||||||
|
- **Clipping and overflow issues**: Pure CSS poppers will not be prevented from
|
||||||
|
overflowing clipping boundaries, such as the viewport. It will get partially
|
||||||
|
cut off or overflows if it's near the edge since there is no dynamic
|
||||||
|
positioning logic. When using Popper, your popper will always be positioned in
|
||||||
|
the right place without needing manual adjustments.
|
||||||
|
- **No flipping**: CSS poppers will not flip to a different placement to fit
|
||||||
|
better in view if necessary. While you can manually adjust for the main axis
|
||||||
|
overflow, this feature cannot be achieved via CSS alone. Popper automatically
|
||||||
|
flips the tooltip to make it fit in view as best as possible for the user.
|
||||||
|
- **No virtual positioning**: CSS poppers cannot follow the mouse cursor or be
|
||||||
|
used as a context menu. Popper allows you to position your tooltip relative to
|
||||||
|
any coordinates you desire.
|
||||||
|
- **Slower development cycle**: When pure CSS is used to position popper
|
||||||
|
elements, the lack of dynamic positioning means they must be carefully placed
|
||||||
|
to consider overflow on all screen sizes. In reusable component libraries,
|
||||||
|
this means a developer can't just add the component anywhere on the page,
|
||||||
|
because these issues need to be considered and adjusted for every time. With
|
||||||
|
Popper, you can place your elements anywhere and they will be positioned
|
||||||
|
correctly, without needing to consider different screen sizes, layouts, etc.
|
||||||
|
This massively speeds up development time because this work is automatically
|
||||||
|
offloaded to Popper.
|
||||||
|
- **Lack of extensibility**: CSS poppers cannot be easily extended to fit any
|
||||||
|
arbitrary use case you may need to adjust for. Popper is built with
|
||||||
|
extensibility in mind.
|
||||||
|
|
||||||
|
## Why Popper?
|
||||||
|
|
||||||
|
With the CSS drawbacks out of the way, we now move on to Popper in the
|
||||||
|
JavaScript space itself.
|
||||||
|
|
||||||
|
Naive JavaScript tooltip implementations usually have the following problems:
|
||||||
|
|
||||||
|
- **Scrolling containers**: They don't ensure the tooltip stays with the
|
||||||
|
reference element while scrolling when inside any number of scrolling
|
||||||
|
containers.
|
||||||
|
- **DOM context**: They often require the tooltip move outside of its original
|
||||||
|
DOM context because they don't handle `offsetParent` contexts.
|
||||||
|
- **Compatibility**: Popper handles an incredible number of edge cases regarding
|
||||||
|
different browsers and environments (mobile viewports, RTL, scrollbars enabled
|
||||||
|
or disabled, etc.). Popper is a popular and well-maintained library, so you
|
||||||
|
can be confident positioning will work for your users on any device.
|
||||||
|
- **Configurability**: They often lack advanced configurability to suit any
|
||||||
|
possible use case.
|
||||||
|
- **Size**: They are usually relatively large in size, or require an ancient
|
||||||
|
jQuery dependency.
|
||||||
|
- **Performance**: They often have runtime performance issues and update the
|
||||||
|
tooltip position too slowly.
|
||||||
|
|
||||||
|
**Popper solves all of these key problems in an elegant, performant manner.** It
|
||||||
|
is a lightweight ~3 kB library that aims to provide a reliable and extensible
|
||||||
|
positioning engine you can use to ensure all your popper elements are positioned
|
||||||
|
in the right place.
|
||||||
|
|
||||||
|
When you start writing your own popper implementation, you'll quickly run into
|
||||||
|
all of the problems mentioned above. These widgets are incredibly common in our
|
||||||
|
UIs; we've done the hard work figuring this out so you don't need to spend hours
|
||||||
|
fixing and handling numerous edge cases that we already ran into while building
|
||||||
|
the library!
|
||||||
|
|
||||||
|
Popper is used in popular libraries like Bootstrap, Foundation, Material UI, and
|
||||||
|
more. It's likely you've already used popper elements on the web positioned by
|
||||||
|
Popper at some point in the past few years.
|
||||||
|
|
||||||
|
Since we write UIs using powerful abstraction libraries such as React or Angular
|
||||||
|
nowadays, you'll also be glad to know Popper can fully integrate with them and
|
||||||
|
be a good citizen together with your other components. Check out `react-popper`
|
||||||
|
for the official Popper wrapper for React.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
### 1. Package Manager
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# With npm
|
||||||
|
npm i @popperjs/core
|
||||||
|
|
||||||
|
# With Yarn
|
||||||
|
yarn add @popperjs/core
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. CDN
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- Development version -->
|
||||||
|
<script src="https://unpkg.com/@popperjs/core@2/dist/umd/popper.js"></script>
|
||||||
|
|
||||||
|
<!-- Production version -->
|
||||||
|
<script src="https://unpkg.com/@popperjs/core@2"></script>
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Direct Download?
|
||||||
|
|
||||||
|
Managing dependencies by "directly downloading" them and placing them into your
|
||||||
|
source code is not recommended for a variety of reasons, including missing out
|
||||||
|
on feat/fix updates easily. Please use a versioning management system like a CDN
|
||||||
|
or npm/Yarn.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
The most straightforward way to get started is to import Popper from the `unpkg`
|
||||||
|
CDN, which includes all of its features. You can call the `Popper.createPopper`
|
||||||
|
constructor to create new popper instances.
|
||||||
|
|
||||||
|
Here is a complete example:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<title>Popper example</title>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
#tooltip {
|
||||||
|
background-color: #333;
|
||||||
|
color: white;
|
||||||
|
padding: 5px 10px;
|
||||||
|
border-radius: 4px;
|
||||||
|
font-size: 13px;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
|
||||||
|
<button id="button" aria-describedby="tooltip">I'm a button</button>
|
||||||
|
<div id="tooltip" role="tooltip">I'm a tooltip</div>
|
||||||
|
|
||||||
|
<script src="https://unpkg.com/@popperjs/core@^2.0.0"></script>
|
||||||
|
<script>
|
||||||
|
const button = document.querySelector('#button');
|
||||||
|
const tooltip = document.querySelector('#tooltip');
|
||||||
|
|
||||||
|
// Pass the button, the tooltip, and some options, and Popper will do the
|
||||||
|
// magic positioning for you:
|
||||||
|
Popper.createPopper(button, tooltip, {
|
||||||
|
placement: 'right',
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
```
|
||||||
|
|
||||||
|
Visit the [tutorial](https://popper.js.org/docs/v2/tutorial/) for an example of
|
||||||
|
how to build your own tooltip from scratch using Popper.
|
||||||
|
|
||||||
|
### Module bundlers
|
||||||
|
|
||||||
|
You can import the `createPopper` constructor from the fully-featured file:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createPopper } from '@popperjs/core';
|
||||||
|
|
||||||
|
const button = document.querySelector('#button');
|
||||||
|
const tooltip = document.querySelector('#tooltip');
|
||||||
|
|
||||||
|
// Pass the button, the tooltip, and some options, and Popper will do the
|
||||||
|
// magic positioning for you:
|
||||||
|
createPopper(button, tooltip, {
|
||||||
|
placement: 'right',
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
All the modifiers listed in the docs menu will be enabled and "just work", so
|
||||||
|
you don't need to think about setting Popper up. The size of Popper including
|
||||||
|
all of its features is about 5 kB minzipped, but it may grow a bit in the
|
||||||
|
future.
|
||||||
|
|
||||||
|
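
For instance, individual modifiers can be configured through the `modifiers`
option when creating an instance; a short sketch based on the v2 options
format (the 8px offset value is arbitrary):

```js
import { createPopper } from '@popperjs/core';

const button = document.querySelector('#button');
const tooltip = document.querySelector('#tooltip');

createPopper(button, tooltip, {
  placement: 'right',
  modifiers: [
    // Shift the popper 8px away from the reference element.
    { name: 'offset', options: { offset: [0, 8] } },
  ],
});
```
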
#### Popper Lite (tree-shaking)
|
||||||
|
|
||||||
|
If bundle size is important, you'll want to take advantage of tree-shaking. The
|
||||||
|
library is built in a modular way to allow to import only the parts you really
|
||||||
|
need.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { createPopperLite as createPopper } from '@popperjs/core';
|
||||||
|
```
|
||||||
|
|
||||||
|
The Lite version includes the most necessary modifiers that will compute the
|
||||||
|
offsets of the popper, compute and add the positioning styles, and add event
|
||||||
|
listeners. This is close in bundle size to pure CSS tooltip libraries, and
|
||||||
|
behaves somewhat similarly.
|
||||||
|
|
||||||
|
However, this does not include the features that makes Popper truly useful.
|
||||||
|
|
||||||
|
The two most useful modifiers not included in Lite are `preventOverflow` and
|
||||||
|
`flip`:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {
|
||||||
|
createPopperLite as createPopper,
|
||||||
|
preventOverflow,
|
||||||
|
flip,
|
||||||
|
} from '@popperjs/core';
|
||||||
|
|
||||||
|
const button = document.querySelector('#button');
|
||||||
|
const tooltip = document.querySelector('#tooltip');
|
||||||
|
|
||||||
|
createPopper(button, tooltip, {
|
||||||
|
modifiers: [preventOverflow, flip],
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
As you make more poppers, you may be finding yourself needing other modifiers
|
||||||
|
provided by the library.
|
||||||
|
|
||||||
|
See [tree-shaking](https://popper.js.org/docs/v2/performance/#tree-shaking) for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
## Distribution targets
|
||||||
|
|
||||||
|
Popper is distributed in 3 different versions, in 3 different file formats.
|
||||||
|
|
||||||
|
The 3 file formats are:
|
||||||
|
|
||||||
|
- `esm` (works with `import` syntax — **recommended**)
|
||||||
|
- `umd` (works with `<script>` tags or RequireJS)
|
||||||
|
- `cjs` (works with `require()` syntax)
|
||||||
|
|
||||||
|
There are two different `esm` builds, one for bundler consumers (e.g. webpack,
|
||||||
|
Rollup, etc..), which is located under `/lib`, and one for browsers with native
|
||||||
|
support for ES Modules, under `/dist/esm`. The only difference within the two,
|
||||||
|
is that the browser-compatible version doesn't make use of
|
||||||
|
`process.env.NODE_ENV` to run development checks.
|
||||||
|
|
||||||
|
The 3 versions are:
|
||||||
|
|
||||||
|
- `popper`: includes all the modifiers (features) in one file (**default**);
|
||||||
|
- `popper-lite`: includes only the minimum amount of modifiers to provide the
|
||||||
|
basic functionality;
|
||||||
|
- `popper-base`: doesn't include any modifier, you must import them separately;
|
||||||
|
|
||||||
|
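
As a sketch, consuming the default `popper` build through each of the three
file formats looks roughly like this (the CDN URL is the one from the
Installation section above):

```js
// esm (recommended; bundlers, or browsers with native module support):
import { createPopper } from '@popperjs/core';

// cjs (Node-style require):
//   const { createPopper } = require('@popperjs/core');

// umd (browser <script> tag; exposes the `Popper` global):
//   <script src="https://unpkg.com/@popperjs/core@2"></script>
//   Popper.createPopper(button, tooltip, { placement: 'top' });

createPopper(document.querySelector('#button'),
             document.querySelector('#tooltip'));
```
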
Below you can find the size of each version, minified and compressed with the
|
||||||
|
[Brotli compression algorithm](https://medium.com/groww-engineering/enable-brotli-compression-in-webpack-with-fallback-to-gzip-397a57cf9fc6):
|
||||||
|
|
||||||
|
<!-- Don't change the labels to use hyphens, it breaks, even when encoded -->
|
||||||
|
|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|
|
||||||
|
## Hacking the library
|
||||||
|
|
||||||
|
If you want to play with the library, implement new features, fix a bug you
|
||||||
|
found, or simply experiment with it, this section is for you!
|
||||||
|
|
||||||
|
First of all, make sure to have
|
||||||
|
[Yarn installed](https://yarnpkg.com/lang/en/docs/install).
|
||||||
|
|
||||||
|
Install the development dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn install
|
||||||
|
```
|
||||||
|
|
||||||
|
And run the development environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn dev
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, simply open one the development server web page:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# macOS and Linux
|
||||||
|
open localhost:5000
|
||||||
|
|
||||||
|
# Windows
|
||||||
|
start localhost:5000
|
||||||
|
```
|
||||||
|
|
||||||
|
From there, you can open any of the examples (`.html` files) to fiddle with
|
||||||
|
them.
|
||||||
|
|
||||||
|
Now any change you will made to the source code, will be automatically compiled,
|
||||||
|
you just need to refresh the page.
|
||||||
|
|
||||||
|
If the page is not working properly, try to go in _"Developer Tools >
|
||||||
|
Application > Clear storage"_ and click on "_Clear site data_".
|
||||||
|
To run the examples you need a browser with
|
||||||
|
[JavaScript modules via script tag support](https://caniuse.com/#feat=es6-module).
|
||||||
|
|
||||||
|
## Test Suite
|
||||||
|
|
||||||
|
Popper is currently tested with unit tests, and functional tests. Both of them
|
||||||
|
are run by Jest.
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
|
||||||
|
The unit tests use JSDOM to provide a primitive document object API, they are
|
||||||
|
used to ensure the utility functions behave as expected in isolation.
|
||||||
|
|
||||||
|
### Functional Tests
|
||||||
|
|
||||||
|
The functional tests run with Puppeteer, to take advantage of a complete browser
|
||||||
|
environment. They are currently running on Chromium, and Firefox.
|
||||||
|
|
||||||
|
You can run them with `yarn test:functional`. Set the `PUPPETEER_BROWSER`
|
||||||
|
environment variable to `firefox` to run them on the Mozilla browser.
|
||||||
|
|
||||||
|
The assertions are written in form of image snapshots, so that it's easy to
|
||||||
|
assert for the correct Popper behavior without having to write a lot of offsets
|
||||||
|
comparisons manually.
|
||||||
|
|
||||||
|
You can mark a `*.test.js` file to run in the Puppeteer environment by
|
||||||
|
prepending a `@jest-environment puppeteer` JSDoc comment to the interested file.
|
||||||
|
|
||||||
|
Here's an example of a basic functional test:
|
||||||
|
|
||||||
|
```js
|
||||||
|
/**
|
||||||
|
* @jest-environment puppeteer
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
import { screenshot } from '../utils/puppeteer.js';
|
||||||
|
|
||||||
|
it('should position the popper on the right', async () => {
|
||||||
|
const page = await browser.newPage();
|
||||||
|
await page.goto(`${TEST_URL}/basic.html`);
|
||||||
|
|
||||||
|
expect(await screenshot(page)).toMatchImageSnapshot();
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find the complete
|
||||||
|
[`jest-puppeteer` documentation here](https://github.com/smooth-code/jest-puppeteer#api),
|
||||||
|
and the
|
||||||
|
[`jest-image-snapshot` documentation here](https://github.com/americanexpress/jest-image-snapshot#%EF%B8%8F-api).
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
65  node_modules/@popperjs/core/dist/cjs/enums.js  (generated, vendored, normal file)
@@ -0,0 +1,65 @@
|
|||||||
|
/**
|
||||||
|
* @popperjs/core v2.11.8 - MIT License
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
var top = 'top';
|
||||||
|
var bottom = 'bottom';
|
||||||
|
var right = 'right';
|
||||||
|
var left = 'left';
|
||||||
|
var auto = 'auto';
|
||||||
|
var basePlacements = [top, bottom, right, left];
|
||||||
|
var start = 'start';
|
||||||
|
var end = 'end';
|
||||||
|
var clippingParents = 'clippingParents';
|
||||||
|
var viewport = 'viewport';
|
||||||
|
var popper = 'popper';
|
||||||
|
var reference = 'reference';
|
||||||
|
var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {
|
||||||
|
return acc.concat([placement + "-" + start, placement + "-" + end]);
|
||||||
|
}, []);
|
||||||
|
var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {
|
||||||
|
return acc.concat([placement, placement + "-" + start, placement + "-" + end]);
|
||||||
|
}, []); // modifiers that need to read the DOM
|
||||||
|
|
||||||
|
var beforeRead = 'beforeRead';
|
||||||
|
var read = 'read';
|
||||||
|
var afterRead = 'afterRead'; // pure-logic modifiers
|
||||||
|
|
||||||
|
var beforeMain = 'beforeMain';
|
||||||
|
var main = 'main';
|
||||||
|
var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)
|
||||||
|
|
||||||
|
var beforeWrite = 'beforeWrite';
|
||||||
|
var write = 'write';
|
||||||
|
var afterWrite = 'afterWrite';
|
||||||
|
var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];
|
||||||
|
|
||||||
|
exports.afterMain = afterMain;
|
||||||
|
exports.afterRead = afterRead;
|
||||||
|
exports.afterWrite = afterWrite;
|
||||||
|
exports.auto = auto;
|
||||||
|
exports.basePlacements = basePlacements;
|
||||||
|
exports.beforeMain = beforeMain;
|
||||||
|
exports.beforeRead = beforeRead;
|
||||||
|
exports.beforeWrite = beforeWrite;
|
||||||
|
exports.bottom = bottom;
|
||||||
|
exports.clippingParents = clippingParents;
|
||||||
|
exports.end = end;
|
||||||
|
exports.left = left;
|
||||||
|
exports.main = main;
|
||||||
|
exports.modifierPhases = modifierPhases;
|
||||||
|
exports.placements = placements;
|
||||||
|
exports.popper = popper;
|
||||||
|
exports.read = read;
|
||||||
|
exports.reference = reference;
|
||||||
|
exports.right = right;
|
||||||
|
exports.start = start;
|
||||||
|
exports.top = top;
|
||||||
|
exports.variationPlacements = variationPlacements;
|
||||||
|
exports.viewport = viewport;
|
||||||
|
exports.write = write;
|
||||||
|
//# sourceMappingURL=enums.js.map
|
||||||
3  node_modules/@popperjs/core/dist/cjs/enums.js.flow  (generated, vendored, normal file)
@@ -0,0 +1,3 @@
// @flow

export * from '../../lib/enums.js'

1  node_modules/@popperjs/core/dist/cjs/enums.js.map  (generated, vendored, normal file)
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"enums.js","sources":["../../src/enums.js"],"sourcesContent":["// @flow\nexport const top: 'top' = 'top';\nexport const bottom: 'bottom' = 'bottom';\nexport const right: 'right' = 'right';\nexport const left: 'left' = 'left';\nexport const auto: 'auto' = 'auto';\nexport type BasePlacement =\n | typeof top\n | typeof bottom\n | typeof right\n | typeof left;\nexport const basePlacements: Array<BasePlacement> = [top, bottom, right, left];\n\nexport const start: 'start' = 'start';\nexport const end: 'end' = 'end';\nexport type Variation = typeof start | typeof end;\n\nexport const clippingParents: 'clippingParents' = 'clippingParents';\nexport const viewport: 'viewport' = 'viewport';\nexport type Boundary = Element | Array<Element> | typeof clippingParents;\nexport type RootBoundary = typeof viewport | 'document';\n\nexport const popper: 'popper' = 'popper';\nexport const reference: 'reference' = 'reference';\nexport type Context = typeof popper | typeof reference;\n\nexport type VariationPlacement =\n | 'top-start'\n | 'top-end'\n | 'bottom-start'\n | 'bottom-end'\n | 'right-start'\n | 'right-end'\n | 'left-start'\n | 'left-end';\nexport type AutoPlacement = 'auto' | 'auto-start' | 'auto-end';\nexport type ComputedPlacement = VariationPlacement | BasePlacement;\nexport type Placement = AutoPlacement | BasePlacement | VariationPlacement;\n\nexport const variationPlacements: Array<VariationPlacement> = basePlacements.reduce(\n (acc: Array<VariationPlacement>, placement: BasePlacement) =>\n acc.concat([(`${placement}-${start}`: any), (`${placement}-${end}`: any)]),\n []\n);\nexport const placements: Array<Placement> = [...basePlacements, auto].reduce(\n (\n acc: Array<Placement>,\n placement: BasePlacement | typeof auto\n ): Array<Placement> =>\n acc.concat([\n placement,\n (`${placement}-${start}`: any),\n (`${placement}-${end}`: any),\n ]),\n []\n);\n\n// modifiers that need to read the DOM\nexport const beforeRead: 'beforeRead' = 'beforeRead';\nexport const read: 'read' = 'read';\nexport const afterRead: 'afterRead' = 'afterRead';\n// pure-logic modifiers\nexport const beforeMain: 'beforeMain' = 'beforeMain';\nexport const main: 'main' = 'main';\nexport const afterMain: 'afterMain' = 'afterMain';\n// modifier with the purpose to write to the DOM (or write into a framework state)\nexport const beforeWrite: 'beforeWrite' = 'beforeWrite';\nexport const write: 'write' = 'write';\nexport const afterWrite: 'afterWrite' = 'afterWrite';\nexport const modifierPhases: Array<ModifierPhases> = [\n beforeRead,\n read,\n afterRead,\n beforeMain,\n main,\n afterMain,\n beforeWrite,\n write,\n afterWrite,\n];\n\nexport type ModifierPhases =\n | typeof beforeRead\n | typeof read\n | typeof afterRead\n | typeof beforeMain\n | typeof main\n | typeof afterMain\n | typeof beforeWrite\n | typeof write\n | typeof 
afterWrite;\n"],"names":["top","bottom","right","left","auto","basePlacements","start","end","clippingParents","viewport","popper","reference","variationPlacements","reduce","acc","placement","concat","placements","beforeRead","read","afterRead","beforeMain","main","afterMain","beforeWrite","write","afterWrite","modifierPhases"],"mappings":";;;;;;;;IACaA,GAAU,GAAG;IACbC,MAAgB,GAAG;IACnBC,KAAc,GAAG;IACjBC,IAAY,GAAG;IACfC,IAAY,GAAG;IAMfC,cAAoC,GAAG,CAACL,GAAD,EAAMC,MAAN,EAAcC,KAAd,EAAqBC,IAArB;IAEvCG,KAAc,GAAG;IACjBC,GAAU,GAAG;IAGbC,eAAkC,GAAG;IACrCC,QAAoB,GAAG;IAIvBC,MAAgB,GAAG;IACnBC,SAAsB,GAAG;IAgBzBC,mBAA8C,gBAAGP,cAAc,CAACQ,MAAf,CAC5D,UAACC,GAAD,EAAiCC,SAAjC;AAAA,SACED,GAAG,CAACE,MAAJ,CAAW,CAAKD,SAAL,SAAkBT,KAAlB,EAAqCS,SAArC,SAAkDR,GAAlD,CAAX,CADF;AAAA,CAD4D,EAG5D,EAH4D;IAKjDU,UAA4B,gBAAG,UAAIZ,cAAJ,GAAoBD,IAApB,GAA0BS,MAA1B,CAC1C,UACEC,GADF,EAEEC,SAFF;AAAA,SAIED,GAAG,CAACE,MAAJ,CAAW,CACTD,SADS,EAELA,SAFK,SAEQT,KAFR,EAGLS,SAHK,SAGQR,GAHR,CAAX,CAJF;AAAA,CAD0C,EAU1C,EAV0C;;IAc/BW,UAAwB,GAAG;IAC3BC,IAAY,GAAG;IACfC,SAAsB,GAAG;;IAEzBC,UAAwB,GAAG;IAC3BC,IAAY,GAAG;IACfC,SAAsB,GAAG;;IAEzBC,WAA0B,GAAG;IAC7BC,KAAc,GAAG;IACjBC,UAAwB,GAAG;IAC3BC,cAAqC,GAAG,CACnDT,UADmD,EAEnDC,IAFmD,EAGnDC,SAHmD,EAInDC,UAJmD,EAKnDC,IALmD,EAMnDC,SANmD,EAOnDC,WAPmD,EAQnDC,KARmD,EASnDC,UATmD;;;;;;;;;;;;;;;;;;;;;;;;;"}
|
||||||
939  node_modules/@popperjs/core/dist/cjs/popper-base.js  (generated, vendored, normal file)
@@ -0,0 +1,939 @@
|
|||||||
|
/**
|
||||||
|
* @popperjs/core v2.11.8 - MIT License
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
function getWindow(node) {
|
||||||
|
if (node == null) {
|
||||||
|
return window;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (node.toString() !== '[object Window]') {
|
||||||
|
var ownerDocument = node.ownerDocument;
|
||||||
|
return ownerDocument ? ownerDocument.defaultView || window : window;
|
||||||
|
}
|
||||||
|
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isElement(node) {
|
||||||
|
var OwnElement = getWindow(node).Element;
|
||||||
|
return node instanceof OwnElement || node instanceof Element;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isHTMLElement(node) {
|
||||||
|
var OwnElement = getWindow(node).HTMLElement;
|
||||||
|
return node instanceof OwnElement || node instanceof HTMLElement;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isShadowRoot(node) {
|
||||||
|
// IE 11 has no ShadowRoot
|
||||||
|
if (typeof ShadowRoot === 'undefined') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
var OwnElement = getWindow(node).ShadowRoot;
|
||||||
|
return node instanceof OwnElement || node instanceof ShadowRoot;
|
||||||
|
}
|
||||||
|
|
||||||
|
var max = Math.max;
|
||||||
|
var min = Math.min;
|
||||||
|
var round = Math.round;
|
||||||
|
|
||||||
|
function getUAString() {
|
||||||
|
var uaData = navigator.userAgentData;
|
||||||
|
|
||||||
|
if (uaData != null && uaData.brands && Array.isArray(uaData.brands)) {
|
||||||
|
return uaData.brands.map(function (item) {
|
||||||
|
return item.brand + "/" + item.version;
|
||||||
|
}).join(' ');
|
||||||
|
}
|
||||||
|
|
||||||
|
return navigator.userAgent;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isLayoutViewport() {
|
||||||
|
return !/^((?!chrome|android).)*safari/i.test(getUAString());
|
||||||
|
}
|
||||||
|
|
||||||
|
function getBoundingClientRect(element, includeScale, isFixedStrategy) {
|
||||||
|
if (includeScale === void 0) {
|
||||||
|
includeScale = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isFixedStrategy === void 0) {
|
||||||
|
isFixedStrategy = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
var clientRect = element.getBoundingClientRect();
|
||||||
|
var scaleX = 1;
|
||||||
|
var scaleY = 1;
|
||||||
|
|
||||||
|
if (includeScale && isHTMLElement(element)) {
|
||||||
|
scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;
|
||||||
|
scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ref = isElement(element) ? getWindow(element) : window,
|
||||||
|
visualViewport = _ref.visualViewport;
|
||||||
|
|
||||||
|
var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;
|
||||||
|
var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;
|
||||||
|
var y = (clientRect.top + (addVisualOffsets && visualViewport ? visualViewport.offsetTop : 0)) / scaleY;
|
||||||
|
var width = clientRect.width / scaleX;
|
||||||
|
var height = clientRect.height / scaleY;
|
||||||
|
return {
|
||||||
|
width: width,
|
||||||
|
height: height,
|
||||||
|
top: y,
|
||||||
|
right: x + width,
|
||||||
|
bottom: y + height,
|
||||||
|
left: x,
|
||||||
|
x: x,
|
||||||
|
y: y
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getWindowScroll(node) {
|
||||||
|
var win = getWindow(node);
|
||||||
|
var scrollLeft = win.pageXOffset;
|
||||||
|
var scrollTop = win.pageYOffset;
|
||||||
|
return {
|
||||||
|
scrollLeft: scrollLeft,
|
||||||
|
scrollTop: scrollTop
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getHTMLElementScroll(element) {
|
||||||
|
return {
|
||||||
|
scrollLeft: element.scrollLeft,
|
||||||
|
scrollTop: element.scrollTop
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getNodeScroll(node) {
|
||||||
|
if (node === getWindow(node) || !isHTMLElement(node)) {
|
||||||
|
return getWindowScroll(node);
|
||||||
|
} else {
|
||||||
|
return getHTMLElementScroll(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getNodeName(element) {
|
||||||
|
return element ? (element.nodeName || '').toLowerCase() : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDocumentElement(element) {
|
||||||
|
// $FlowFixMe[incompatible-return]: assume body is always available
|
||||||
|
return ((isElement(element) ? element.ownerDocument : // $FlowFixMe[prop-missing]
|
||||||
|
element.document) || window.document).documentElement;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getWindowScrollBarX(element) {
|
||||||
|
// If <html> has a CSS width greater than the viewport, then this will be
|
||||||
|
// incorrect for RTL.
|
||||||
|
// Popper 1 is broken in this case and never had a bug report so let's assume
|
||||||
|
// it's not an issue. I don't think anyone ever specifies width on <html>
|
||||||
|
// anyway.
|
||||||
|
// Browsers where the left scrollbar doesn't cause an issue report `0` for
|
||||||
|
// this (e.g. Edge 2019, IE11, Safari)
|
||||||
|
return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getComputedStyle(element) {
|
||||||
|
return getWindow(element).getComputedStyle(element);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isScrollParent(element) {
|
||||||
|
// Firefox wants us to check `-x` and `-y` variations as well
|
||||||
|
var _getComputedStyle = getComputedStyle(element),
|
||||||
|
overflow = _getComputedStyle.overflow,
|
||||||
|
overflowX = _getComputedStyle.overflowX,
|
||||||
|
overflowY = _getComputedStyle.overflowY;
|
||||||
|
|
||||||
|
return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isElementScaled(element) {
|
||||||
|
var rect = element.getBoundingClientRect();
|
||||||
|
var scaleX = round(rect.width) / element.offsetWidth || 1;
|
||||||
|
var scaleY = round(rect.height) / element.offsetHeight || 1;
|
||||||
|
return scaleX !== 1 || scaleY !== 1;
|
||||||
|
} // Returns the composite rect of an element relative to its offsetParent.
|
||||||
|
// Composite means it takes into account transforms as well as layout.
|
||||||
|
|
||||||
|
|
||||||
|
function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {
|
||||||
|
if (isFixed === void 0) {
|
||||||
|
isFixed = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
var isOffsetParentAnElement = isHTMLElement(offsetParent);
|
||||||
|
var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);
|
||||||
|
var documentElement = getDocumentElement(offsetParent);
|
||||||
|
var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);
|
||||||
|
var scroll = {
|
||||||
|
scrollLeft: 0,
|
||||||
|
scrollTop: 0
|
||||||
|
};
|
||||||
|
var offsets = {
|
||||||
|
x: 0,
|
||||||
|
y: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {
|
||||||
|
if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078
|
||||||
|
isScrollParent(documentElement)) {
|
||||||
|
scroll = getNodeScroll(offsetParent);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isHTMLElement(offsetParent)) {
|
||||||
|
offsets = getBoundingClientRect(offsetParent, true);
|
||||||
|
offsets.x += offsetParent.clientLeft;
|
||||||
|
offsets.y += offsetParent.clientTop;
|
||||||
|
} else if (documentElement) {
|
||||||
|
offsets.x = getWindowScrollBarX(documentElement);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
x: rect.left + scroll.scrollLeft - offsets.x,
|
||||||
|
y: rect.top + scroll.scrollTop - offsets.y,
|
||||||
|
width: rect.width,
|
||||||
|
height: rect.height
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// means it doesn't take into account transforms.
|
||||||
|
|
||||||
|
function getLayoutRect(element) {
|
||||||
|
var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.
|
||||||
|
// Fixes https://github.com/popperjs/popper-core/issues/1223
|
||||||
|
|
||||||
|
var width = element.offsetWidth;
|
||||||
|
var height = element.offsetHeight;
|
||||||
|
|
||||||
|
if (Math.abs(clientRect.width - width) <= 1) {
|
||||||
|
width = clientRect.width;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Math.abs(clientRect.height - height) <= 1) {
|
||||||
|
height = clientRect.height;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
x: element.offsetLeft,
|
||||||
|
y: element.offsetTop,
|
||||||
|
width: width,
|
||||||
|
height: height
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getParentNode(element) {
|
||||||
|
if (getNodeName(element) === 'html') {
|
||||||
|
return element;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle
|
||||||
|
// $FlowFixMe[incompatible-return]
|
||||||
|
// $FlowFixMe[prop-missing]
|
||||||
|
element.assignedSlot || // step into the shadow DOM of the parent of a slotted node
|
||||||
|
element.parentNode || ( // DOM Element detected
|
||||||
|
isShadowRoot(element) ? element.host : null) || // ShadowRoot detected
|
||||||
|
// $FlowFixMe[incompatible-call]: HTMLElement is a Node
|
||||||
|
getDocumentElement(element) // fallback
|
||||||
|
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getScrollParent(node) {
|
||||||
|
if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {
|
||||||
|
// $FlowFixMe[incompatible-return]: assume body is always available
|
||||||
|
return node.ownerDocument.body;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isHTMLElement(node) && isScrollParent(node)) {
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
|
||||||
|
return getScrollParent(getParentNode(node));
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
given a DOM element, return the list of all scroll parents, up the list of ancesors
|
||||||
|
until we get to the top window object. This list is what we attach scroll listeners
|
||||||
|
to, because if any of these parent elements scroll, we'll need to re-calculate the
|
||||||
|
reference element's position.
|
||||||
|
*/
|
||||||
|
|
||||||
|
function listScrollParents(element, list) {
|
||||||
|
var _element$ownerDocumen;
|
||||||
|
|
||||||
|
if (list === void 0) {
|
||||||
|
list = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
var scrollParent = getScrollParent(element);
|
||||||
|
var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);
|
||||||
|
var win = getWindow(scrollParent);
|
||||||
|
var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;
|
||||||
|
var updatedList = list.concat(target);
|
||||||
|
return isBody ? updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here
|
||||||
|
updatedList.concat(listScrollParents(getParentNode(target)));
|
||||||
|
}
|
||||||
|
|
||||||
|
function isTableElement(element) {
|
||||||
|
return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getTrueOffsetParent(element) {
|
||||||
|
if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837
|
||||||
|
getComputedStyle(element).position === 'fixed') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return element.offsetParent;
|
||||||
|
} // `.offsetParent` reports `null` for fixed elements, while absolute elements
|
||||||
|
// return the containing block
|
||||||
|
|
||||||
|
|
||||||
|
function getContainingBlock(element) {
|
||||||
|
var isFirefox = /firefox/i.test(getUAString());
|
||||||
|
var isIE = /Trident/i.test(getUAString());
|
||||||
|
|
||||||
|
if (isIE && isHTMLElement(element)) {
|
||||||
|
// In IE 9, 10 and 11 fixed elements containing block is always established by the viewport
|
||||||
|
var elementCss = getComputedStyle(element);
|
||||||
|
|
||||||
|
if (elementCss.position === 'fixed') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var currentNode = getParentNode(element);
|
||||||
|
|
||||||
|
if (isShadowRoot(currentNode)) {
|
||||||
|
currentNode = currentNode.host;
|
||||||
|
}
|
||||||
|
|
||||||
|
while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {
|
||||||
|
var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that
|
||||||
|
// create a containing block.
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block
|
||||||
|
|
||||||
|
if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {
|
||||||
|
return currentNode;
|
||||||
|
} else {
|
||||||
|
currentNode = currentNode.parentNode;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
} // Gets the closest ancestor positioned element. Handles some edge cases,
|
||||||
|
// such as table ancestors and cross browser bugs.
|
||||||
|
|
||||||
|
|
||||||
|
function getOffsetParent(element) {
|
||||||
|
var window = getWindow(element);
|
||||||
|
var offsetParent = getTrueOffsetParent(element);
|
||||||
|
|
||||||
|
while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {
|
||||||
|
offsetParent = getTrueOffsetParent(offsetParent);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {
|
||||||
|
return window;
|
||||||
|
}
|
||||||
|
|
||||||
|
return offsetParent || getContainingBlock(element) || window;
|
||||||
|
}
|
||||||
|
|
||||||
|
var top = 'top';
|
||||||
|
var bottom = 'bottom';
|
||||||
|
var right = 'right';
|
||||||
|
var left = 'left';
|
||||||
|
var basePlacements = [top, bottom, right, left];
|
||||||
|
var start = 'start';
|
||||||
|
var end = 'end';
|
||||||
|
var clippingParents = 'clippingParents';
|
||||||
|
var viewport = 'viewport';
|
||||||
|
var popper = 'popper';
|
||||||
|
var reference = 'reference';
|
||||||
|
|
||||||
|
var beforeRead = 'beforeRead';
|
||||||
|
var read = 'read';
|
||||||
|
var afterRead = 'afterRead'; // pure-logic modifiers
|
||||||
|
|
||||||
|
var beforeMain = 'beforeMain';
|
||||||
|
var main = 'main';
|
||||||
|
var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)
|
||||||
|
|
||||||
|
var beforeWrite = 'beforeWrite';
|
||||||
|
var write = 'write';
|
||||||
|
var afterWrite = 'afterWrite';
|
||||||
|
var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];
|
||||||
|
|
||||||
|
function order(modifiers) {
|
||||||
|
var map = new Map();
|
||||||
|
var visited = new Set();
|
||||||
|
var result = [];
|
||||||
|
modifiers.forEach(function (modifier) {
|
||||||
|
map.set(modifier.name, modifier);
|
||||||
|
}); // On visiting object, check for its dependencies and visit them recursively
|
||||||
|
|
||||||
|
function sort(modifier) {
|
||||||
|
visited.add(modifier.name);
|
||||||
|
var requires = [].concat(modifier.requires || [], modifier.requiresIfExists || []);
|
||||||
|
requires.forEach(function (dep) {
|
||||||
|
if (!visited.has(dep)) {
|
||||||
|
var depModifier = map.get(dep);
|
||||||
|
|
||||||
|
if (depModifier) {
|
||||||
|
sort(depModifier);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
result.push(modifier);
|
||||||
|
}
|
||||||
|
|
||||||
|
modifiers.forEach(function (modifier) {
|
||||||
|
if (!visited.has(modifier.name)) {
|
||||||
|
// check for visited object
|
||||||
|
sort(modifier);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function orderModifiers(modifiers) {
|
||||||
|
// order based on dependencies
|
||||||
|
var orderedModifiers = order(modifiers); // order based on phase
|
||||||
|
|
||||||
|
return modifierPhases.reduce(function (acc, phase) {
|
||||||
|
return acc.concat(orderedModifiers.filter(function (modifier) {
|
||||||
|
return modifier.phase === phase;
|
||||||
|
}));
|
||||||
|
}, []);
|
||||||
|
}
|
||||||
|
|
||||||
|
function debounce(fn) {
|
||||||
|
var pending;
|
||||||
|
return function () {
|
||||||
|
if (!pending) {
|
||||||
|
pending = new Promise(function (resolve) {
|
||||||
|
Promise.resolve().then(function () {
|
||||||
|
pending = undefined;
|
||||||
|
resolve(fn());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return pending;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function mergeByName(modifiers) {
|
||||||
|
var merged = modifiers.reduce(function (merged, current) {
|
||||||
|
var existing = merged[current.name];
|
||||||
|
merged[current.name] = existing ? Object.assign({}, existing, current, {
|
||||||
|
options: Object.assign({}, existing.options, current.options),
|
||||||
|
data: Object.assign({}, existing.data, current.data)
|
||||||
|
}) : current;
|
||||||
|
return merged;
|
||||||
|
}, {}); // IE11 does not support Object.values
|
||||||
|
|
||||||
|
return Object.keys(merged).map(function (key) {
|
||||||
|
return merged[key];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function getViewportRect(element, strategy) {
|
||||||
|
var win = getWindow(element);
|
||||||
|
var html = getDocumentElement(element);
|
||||||
|
var visualViewport = win.visualViewport;
|
||||||
|
var width = html.clientWidth;
|
||||||
|
var height = html.clientHeight;
|
||||||
|
var x = 0;
|
||||||
|
var y = 0;
|
||||||
|
|
||||||
|
if (visualViewport) {
|
||||||
|
width = visualViewport.width;
|
||||||
|
height = visualViewport.height;
|
||||||
|
var layoutViewport = isLayoutViewport();
|
||||||
|
|
||||||
|
if (layoutViewport || !layoutViewport && strategy === 'fixed') {
|
||||||
|
x = visualViewport.offsetLeft;
|
||||||
|
y = visualViewport.offsetTop;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
width: width,
|
||||||
|
height: height,
|
||||||
|
x: x + getWindowScrollBarX(element),
|
||||||
|
y: y
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// of the `<html>` and `<body>` rect bounds if horizontally scrollable
|
||||||
|
|
||||||
|
function getDocumentRect(element) {
|
||||||
|
var _element$ownerDocumen;
|
||||||
|
|
||||||
|
var html = getDocumentElement(element);
|
||||||
|
var winScroll = getWindowScroll(element);
|
||||||
|
var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;
|
||||||
|
var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);
|
||||||
|
var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);
|
||||||
|
var x = -winScroll.scrollLeft + getWindowScrollBarX(element);
|
||||||
|
var y = -winScroll.scrollTop;
|
||||||
|
|
||||||
|
if (getComputedStyle(body || html).direction === 'rtl') {
|
||||||
|
x += max(html.clientWidth, body ? body.clientWidth : 0) - width;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
width: width,
|
||||||
|
height: height,
|
||||||
|
x: x,
|
||||||
|
y: y
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function contains(parent, child) {
|
||||||
|
var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method
|
||||||
|
|
||||||
|
if (parent.contains(child)) {
|
||||||
|
return true;
|
||||||
|
} // then fallback to custom implementation with Shadow DOM support
|
||||||
|
else if (rootNode && isShadowRoot(rootNode)) {
|
||||||
|
var next = child;
|
||||||
|
|
||||||
|
do {
|
||||||
|
if (next && parent.isSameNode(next)) {
|
||||||
|
return true;
|
||||||
|
} // $FlowFixMe[prop-missing]: need a better way to handle this...
|
||||||
|
|
||||||
|
|
||||||
|
next = next.parentNode || next.host;
|
||||||
|
} while (next);
|
||||||
|
} // Give up, the result is false
|
||||||
|
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function rectToClientRect(rect) {
|
||||||
|
return Object.assign({}, rect, {
|
||||||
|
left: rect.x,
|
||||||
|
top: rect.y,
|
||||||
|
right: rect.x + rect.width,
|
||||||
|
bottom: rect.y + rect.height
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function getInnerBoundingClientRect(element, strategy) {
|
||||||
|
var rect = getBoundingClientRect(element, false, strategy === 'fixed');
|
||||||
|
rect.top = rect.top + element.clientTop;
|
||||||
|
rect.left = rect.left + element.clientLeft;
|
||||||
|
rect.bottom = rect.top + element.clientHeight;
|
||||||
|
rect.right = rect.left + element.clientWidth;
|
||||||
|
rect.width = element.clientWidth;
|
||||||
|
rect.height = element.clientHeight;
|
||||||
|
rect.x = rect.left;
|
||||||
|
rect.y = rect.top;
|
||||||
|
return rect;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getClientRectFromMixedType(element, clippingParent, strategy) {
|
||||||
|
return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));
|
||||||
|
} // A "clipping parent" is an overflowable container with the characteristic of
|
||||||
|
// clipping (or hiding) overflowing elements with a position different from
|
||||||
|
// `initial`
|
||||||
|
|
||||||
|
|
||||||
|
function getClippingParents(element) {
|
||||||
|
var clippingParents = listScrollParents(getParentNode(element));
|
||||||
|
var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;
|
||||||
|
var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;
|
||||||
|
|
||||||
|
if (!isElement(clipperElement)) {
|
||||||
|
return [];
|
||||||
|
} // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414
|
||||||
|
|
||||||
|
|
||||||
|
return clippingParents.filter(function (clippingParent) {
|
||||||
|
return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';
|
||||||
|
});
|
||||||
|
} // Gets the maximum area that the element is visible in due to any number of
|
||||||
|
// clipping parents
|
||||||
|
|
||||||
|
|
||||||
|
function getClippingRect(element, boundary, rootBoundary, strategy) {
|
||||||
|
var mainClippingParents = boundary === 'clippingParents' ? getClippingParents(element) : [].concat(boundary);
|
||||||
|
var clippingParents = [].concat(mainClippingParents, [rootBoundary]);
|
||||||
|
var firstClippingParent = clippingParents[0];
|
||||||
|
var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {
|
||||||
|
var rect = getClientRectFromMixedType(element, clippingParent, strategy);
|
||||||
|
accRect.top = max(rect.top, accRect.top);
|
||||||
|
accRect.right = min(rect.right, accRect.right);
|
||||||
|
accRect.bottom = min(rect.bottom, accRect.bottom);
|
||||||
|
accRect.left = max(rect.left, accRect.left);
|
||||||
|
return accRect;
|
||||||
|
}, getClientRectFromMixedType(element, firstClippingParent, strategy));
|
||||||
|
clippingRect.width = clippingRect.right - clippingRect.left;
|
||||||
|
clippingRect.height = clippingRect.bottom - clippingRect.top;
|
||||||
|
clippingRect.x = clippingRect.left;
|
||||||
|
clippingRect.y = clippingRect.top;
|
||||||
|
return clippingRect;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getBasePlacement(placement) {
|
||||||
|
return placement.split('-')[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
function getVariation(placement) {
|
||||||
|
return placement.split('-')[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
function getMainAxisFromPlacement(placement) {
|
||||||
|
return ['top', 'bottom'].indexOf(placement) >= 0 ? 'x' : 'y';
|
||||||
|
}
|
||||||
|
|
||||||
|
function computeOffsets(_ref) {
|
||||||
|
var reference = _ref.reference,
|
||||||
|
element = _ref.element,
|
||||||
|
placement = _ref.placement;
|
||||||
|
var basePlacement = placement ? getBasePlacement(placement) : null;
|
||||||
|
var variation = placement ? getVariation(placement) : null;
|
||||||
|
var commonX = reference.x + reference.width / 2 - element.width / 2;
|
||||||
|
var commonY = reference.y + reference.height / 2 - element.height / 2;
|
||||||
|
var offsets;
|
||||||
|
|
||||||
|
switch (basePlacement) {
|
||||||
|
case top:
|
||||||
|
offsets = {
|
||||||
|
x: commonX,
|
||||||
|
y: reference.y - element.height
|
||||||
|
};
|
||||||
|
break;
|
||||||
|
|
||||||
|
case bottom:
|
||||||
|
offsets = {
|
||||||
|
x: commonX,
|
||||||
|
y: reference.y + reference.height
|
||||||
|
};
|
||||||
|
break;
|
||||||
|
|
||||||
|
case right:
|
||||||
|
offsets = {
|
||||||
|
x: reference.x + reference.width,
|
||||||
|
y: commonY
|
||||||
|
};
|
||||||
|
break;
|
||||||
|
|
||||||
|
case left:
|
||||||
|
offsets = {
|
||||||
|
x: reference.x - element.width,
|
||||||
|
y: commonY
|
||||||
|
};
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
offsets = {
|
||||||
|
x: reference.x,
|
||||||
|
y: reference.y
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
var mainAxis = basePlacement ? getMainAxisFromPlacement(basePlacement) : null;
|
||||||
|
|
||||||
|
if (mainAxis != null) {
|
||||||
|
var len = mainAxis === 'y' ? 'height' : 'width';
|
||||||
|
|
||||||
|
switch (variation) {
|
||||||
|
case start:
|
||||||
|
offsets[mainAxis] = offsets[mainAxis] - (reference[len] / 2 - element[len] / 2);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case end:
|
||||||
|
offsets[mainAxis] = offsets[mainAxis] + (reference[len] / 2 - element[len] / 2);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return offsets;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getFreshSideObject() {
|
||||||
|
return {
|
||||||
|
top: 0,
|
||||||
|
right: 0,
|
||||||
|
bottom: 0,
|
||||||
|
left: 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function mergePaddingObject(paddingObject) {
|
||||||
|
return Object.assign({}, getFreshSideObject(), paddingObject);
|
||||||
|
}
|
||||||
|
|
||||||
|
function expandToHashMap(value, keys) {
|
||||||
|
return keys.reduce(function (hashMap, key) {
|
||||||
|
hashMap[key] = value;
|
||||||
|
return hashMap;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
|
||||||
|
function detectOverflow(state, options) {
|
||||||
|
if (options === void 0) {
|
||||||
|
options = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
var _options = options,
|
||||||
|
_options$placement = _options.placement,
|
||||||
|
placement = _options$placement === void 0 ? state.placement : _options$placement,
|
||||||
|
_options$strategy = _options.strategy,
|
||||||
|
strategy = _options$strategy === void 0 ? state.strategy : _options$strategy,
|
||||||
|
_options$boundary = _options.boundary,
|
||||||
|
boundary = _options$boundary === void 0 ? clippingParents : _options$boundary,
|
||||||
|
_options$rootBoundary = _options.rootBoundary,
|
||||||
|
rootBoundary = _options$rootBoundary === void 0 ? viewport : _options$rootBoundary,
|
||||||
|
_options$elementConte = _options.elementContext,
|
||||||
|
elementContext = _options$elementConte === void 0 ? popper : _options$elementConte,
|
||||||
|
_options$altBoundary = _options.altBoundary,
|
||||||
|
altBoundary = _options$altBoundary === void 0 ? false : _options$altBoundary,
|
||||||
|
_options$padding = _options.padding,
|
||||||
|
padding = _options$padding === void 0 ? 0 : _options$padding;
|
||||||
|
var paddingObject = mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));
|
||||||
|
var altContext = elementContext === popper ? reference : popper;
|
||||||
|
var popperRect = state.rects.popper;
|
||||||
|
var element = state.elements[altBoundary ? altContext : elementContext];
|
||||||
|
var clippingClientRect = getClippingRect(isElement(element) ? element : element.contextElement || getDocumentElement(state.elements.popper), boundary, rootBoundary, strategy);
|
||||||
|
var referenceClientRect = getBoundingClientRect(state.elements.reference);
|
||||||
|
var popperOffsets = computeOffsets({
|
||||||
|
reference: referenceClientRect,
|
||||||
|
element: popperRect,
|
||||||
|
strategy: 'absolute',
|
||||||
|
placement: placement
|
||||||
|
});
|
||||||
|
var popperClientRect = rectToClientRect(Object.assign({}, popperRect, popperOffsets));
|
||||||
|
var elementClientRect = elementContext === popper ? popperClientRect : referenceClientRect; // positive = overflowing the clipping rect
|
||||||
|
// 0 or negative = within the clipping rect
|
||||||
|
|
||||||
|
var overflowOffsets = {
|
||||||
|
top: clippingClientRect.top - elementClientRect.top + paddingObject.top,
|
||||||
|
bottom: elementClientRect.bottom - clippingClientRect.bottom + paddingObject.bottom,
|
||||||
|
left: clippingClientRect.left - elementClientRect.left + paddingObject.left,
|
||||||
|
right: elementClientRect.right - clippingClientRect.right + paddingObject.right
|
||||||
|
};
|
||||||
|
var offsetData = state.modifiersData.offset; // Offsets can be applied only to the popper element
|
||||||
|
|
||||||
|
if (elementContext === popper && offsetData) {
|
||||||
|
var offset = offsetData[placement];
|
||||||
|
Object.keys(overflowOffsets).forEach(function (key) {
|
||||||
|
var multiply = [right, bottom].indexOf(key) >= 0 ? 1 : -1;
|
||||||
|
var axis = [top, bottom].indexOf(key) >= 0 ? 'y' : 'x';
|
||||||
|
overflowOffsets[key] += offset[axis] * multiply;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return overflowOffsets;
|
||||||
|
}
|
||||||
|
|
||||||
|
var DEFAULT_OPTIONS = {
|
||||||
|
placement: 'bottom',
|
||||||
|
modifiers: [],
|
||||||
|
strategy: 'absolute'
|
||||||
|
};
|
||||||
|
|
||||||
|
function areValidElements() {
|
||||||
|
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||||
|
args[_key] = arguments[_key];
|
||||||
|
}
|
||||||
|
|
||||||
|
return !args.some(function (element) {
|
||||||
|
return !(element && typeof element.getBoundingClientRect === 'function');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function popperGenerator(generatorOptions) {
|
||||||
|
if (generatorOptions === void 0) {
|
||||||
|
generatorOptions = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
var _generatorOptions = generatorOptions,
|
||||||
|
_generatorOptions$def = _generatorOptions.defaultModifiers,
|
||||||
|
defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,
|
||||||
|
_generatorOptions$def2 = _generatorOptions.defaultOptions,
|
||||||
|
defaultOptions = _generatorOptions$def2 === void 0 ? DEFAULT_OPTIONS : _generatorOptions$def2;
|
||||||
|
return function createPopper(reference, popper, options) {
|
||||||
|
if (options === void 0) {
|
||||||
|
options = defaultOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
var state = {
|
||||||
|
placement: 'bottom',
|
||||||
|
orderedModifiers: [],
|
||||||
|
options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),
|
||||||
|
modifiersData: {},
|
||||||
|
elements: {
|
||||||
|
reference: reference,
|
||||||
|
popper: popper
|
||||||
|
},
|
||||||
|
attributes: {},
|
||||||
|
styles: {}
|
||||||
|
};
|
||||||
|
var effectCleanupFns = [];
|
||||||
|
var isDestroyed = false;
|
||||||
|
var instance = {
|
||||||
|
state: state,
|
||||||
|
setOptions: function setOptions(setOptionsAction) {
|
||||||
|
var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;
|
||||||
|
cleanupModifierEffects();
|
||||||
|
state.options = Object.assign({}, defaultOptions, state.options, options);
|
||||||
|
state.scrollParents = {
|
||||||
|
reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],
|
||||||
|
popper: listScrollParents(popper)
|
||||||
|
}; // Orders the modifiers based on their dependencies and `phase`
|
||||||
|
// properties
|
||||||
|
|
||||||
|
var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers
|
||||||
|
|
||||||
|
state.orderedModifiers = orderedModifiers.filter(function (m) {
|
||||||
|
return m.enabled;
|
||||||
|
});
|
||||||
|
runModifierEffects();
|
||||||
|
return instance.update();
|
||||||
|
},
|
||||||
|
// Sync update – it will always be executed, even if not necessary. This
|
||||||
|
// is useful for low frequency updates where sync behavior simplifies the
|
||||||
|
// logic.
|
||||||
|
// For high frequency updates (e.g. `resize` and `scroll` events), always
|
||||||
|
// prefer the async Popper#update method
|
||||||
|
forceUpdate: function forceUpdate() {
|
||||||
|
if (isDestroyed) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _state$elements = state.elements,
|
||||||
|
reference = _state$elements.reference,
|
||||||
|
popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements
|
||||||
|
// anymore
|
||||||
|
|
||||||
|
if (!areValidElements(reference, popper)) {
|
||||||
|
return;
|
||||||
|
} // Store the reference and popper rects to be read by modifiers
|
||||||
|
|
||||||
|
|
||||||
|
state.rects = {
|
||||||
|
reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),
|
||||||
|
popper: getLayoutRect(popper)
|
||||||
|
}; // Modifiers have the ability to reset the current update cycle. The
|
||||||
|
// most common use case for this is the `flip` modifier changing the
|
||||||
|
// placement, which then needs to re-run all the modifiers, because the
|
||||||
|
// logic was previously ran for the previous placement and is therefore
|
||||||
|
// stale/incorrect
|
||||||
|
|
||||||
|
state.reset = false;
|
||||||
|
state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier
|
||||||
|
// is filled with the initial data specified by the modifier. This means
|
||||||
|
// it doesn't persist and is fresh on each update.
|
||||||
|
// To ensure persistent data, use `${name}#persistent`
|
||||||
|
|
||||||
|
state.orderedModifiers.forEach(function (modifier) {
|
||||||
|
return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);
|
||||||
|
});
|
||||||
|
|
||||||
|
for (var index = 0; index < state.orderedModifiers.length; index++) {
|
||||||
|
if (state.reset === true) {
|
||||||
|
state.reset = false;
|
||||||
|
index = -1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _state$orderedModifie = state.orderedModifiers[index],
|
||||||
|
fn = _state$orderedModifie.fn,
|
||||||
|
_state$orderedModifie2 = _state$orderedModifie.options,
|
||||||
|
_options = _state$orderedModifie2 === void 0 ? {} : _state$orderedModifie2,
|
||||||
|
name = _state$orderedModifie.name;
|
||||||
|
|
||||||
|
if (typeof fn === 'function') {
|
||||||
|
state = fn({
|
||||||
|
state: state,
|
||||||
|
options: _options,
|
||||||
|
name: name,
|
||||||
|
instance: instance
|
||||||
|
}) || state;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Async and optimistically optimized update – it will not be executed if
|
||||||
|
// not necessary (debounced to run at most once-per-tick)
|
||||||
|
update: debounce(function () {
|
||||||
|
return new Promise(function (resolve) {
|
||||||
|
instance.forceUpdate();
|
||||||
|
resolve(state);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
destroy: function destroy() {
|
||||||
|
cleanupModifierEffects();
|
||||||
|
isDestroyed = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!areValidElements(reference, popper)) {
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
instance.setOptions(options).then(function (state) {
|
||||||
|
if (!isDestroyed && options.onFirstUpdate) {
|
||||||
|
options.onFirstUpdate(state);
|
||||||
|
}
|
||||||
|
}); // Modifiers have the ability to execute arbitrary code before the first
|
||||||
|
// update cycle runs. They will be executed in the same order as the update
|
||||||
|
// cycle. This is useful when a modifier adds some persistent data that
|
||||||
|
// other modifiers need to use, but the modifier is run after the dependent
|
||||||
|
// one.
|
||||||
|
|
||||||
|
function runModifierEffects() {
|
||||||
|
state.orderedModifiers.forEach(function (_ref) {
|
||||||
|
var name = _ref.name,
|
||||||
|
_ref$options = _ref.options,
|
||||||
|
options = _ref$options === void 0 ? {} : _ref$options,
|
||||||
|
effect = _ref.effect;
|
||||||
|
|
||||||
|
if (typeof effect === 'function') {
|
||||||
|
var cleanupFn = effect({
|
||||||
|
state: state,
|
||||||
|
name: name,
|
||||||
|
instance: instance,
|
||||||
|
options: options
|
||||||
|
});
|
||||||
|
|
||||||
|
var noopFn = function noopFn() {};
|
||||||
|
|
||||||
|
effectCleanupFns.push(cleanupFn || noopFn);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanupModifierEffects() {
|
||||||
|
effectCleanupFns.forEach(function (fn) {
|
||||||
|
return fn();
|
||||||
|
});
|
||||||
|
effectCleanupFns = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return instance;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules
|
||||||
|
|
||||||
|
exports.createPopper = createPopper;
|
||||||
|
exports.detectOverflow = detectOverflow;
|
||||||
|
exports.popperGenerator = popperGenerator;
|
||||||
|
//# sourceMappingURL=popper-base.js.map
|
||||||
3
node_modules/@popperjs/core/dist/cjs/popper-base.js.flow
generated
vendored
Normal file
3
node_modules/@popperjs/core/dist/cjs/popper-base.js.flow
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// @flow
|
||||||
|
|
||||||
|
export * from '../../lib/popper-base.js'
|
||||||
1
node_modules/@popperjs/core/dist/cjs/popper-base.js.map
generated
vendored
Normal file
1
node_modules/@popperjs/core/dist/cjs/popper-base.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1260
node_modules/@popperjs/core/dist/cjs/popper-lite.js
generated
vendored
Normal file
1260
node_modules/@popperjs/core/dist/cjs/popper-lite.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3
node_modules/@popperjs/core/dist/cjs/popper-lite.js.flow
generated
vendored
Normal file
3
node_modules/@popperjs/core/dist/cjs/popper-lite.js.flow
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// @flow
|
||||||
|
|
||||||
|
export * from '../../lib/popper-lite.js'
|
||||||
1
node_modules/@popperjs/core/dist/cjs/popper-lite.js.map
generated
vendored
Normal file
1
node_modules/@popperjs/core/dist/cjs/popper-lite.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1819
node_modules/@popperjs/core/dist/cjs/popper.js
generated
vendored
Normal file
1819
node_modules/@popperjs/core/dist/cjs/popper.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3
node_modules/@popperjs/core/dist/cjs/popper.js.flow
generated
vendored
Normal file
3
node_modules/@popperjs/core/dist/cjs/popper.js.flow
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// @flow
|
||||||
|
|
||||||
|
export * from '../../lib/popper.js'
|
||||||
1
node_modules/@popperjs/core/dist/cjs/popper.js.map
generated
vendored
Normal file
1
node_modules/@popperjs/core/dist/cjs/popper.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
199
node_modules/@popperjs/core/dist/esm/createPopper.js
generated
vendored
Normal file
199
node_modules/@popperjs/core/dist/esm/createPopper.js
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
import getCompositeRect from "./dom-utils/getCompositeRect.js";
|
||||||
|
import getLayoutRect from "./dom-utils/getLayoutRect.js";
|
||||||
|
import listScrollParents from "./dom-utils/listScrollParents.js";
|
||||||
|
import getOffsetParent from "./dom-utils/getOffsetParent.js";
|
||||||
|
import orderModifiers from "./utils/orderModifiers.js";
|
||||||
|
import debounce from "./utils/debounce.js";
|
||||||
|
import mergeByName from "./utils/mergeByName.js";
|
||||||
|
import detectOverflow from "./utils/detectOverflow.js";
|
||||||
|
import { isElement } from "./dom-utils/instanceOf.js";
|
||||||
|
var DEFAULT_OPTIONS = {
|
||||||
|
placement: 'bottom',
|
||||||
|
modifiers: [],
|
||||||
|
strategy: 'absolute'
|
||||||
|
};
|
||||||
|
|
||||||
|
function areValidElements() {
|
||||||
|
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||||
|
args[_key] = arguments[_key];
|
||||||
|
}
|
||||||
|
|
||||||
|
return !args.some(function (element) {
|
||||||
|
return !(element && typeof element.getBoundingClientRect === 'function');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function popperGenerator(generatorOptions) {
|
||||||
|
if (generatorOptions === void 0) {
|
||||||
|
generatorOptions = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
var _generatorOptions = generatorOptions,
|
||||||
|
_generatorOptions$def = _generatorOptions.defaultModifiers,
|
||||||
|
defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,
|
||||||
|
_generatorOptions$def2 = _generatorOptions.defaultOptions,
|
||||||
|
defaultOptions = _generatorOptions$def2 === void 0 ? DEFAULT_OPTIONS : _generatorOptions$def2;
|
||||||
|
return function createPopper(reference, popper, options) {
|
||||||
|
if (options === void 0) {
|
||||||
|
options = defaultOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
var state = {
|
||||||
|
placement: 'bottom',
|
||||||
|
orderedModifiers: [],
|
||||||
|
options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),
|
||||||
|
modifiersData: {},
|
||||||
|
elements: {
|
||||||
|
reference: reference,
|
||||||
|
popper: popper
|
||||||
|
},
|
||||||
|
attributes: {},
|
||||||
|
styles: {}
|
||||||
|
};
|
||||||
|
var effectCleanupFns = [];
|
||||||
|
var isDestroyed = false;
|
||||||
|
var instance = {
|
||||||
|
state: state,
|
||||||
|
setOptions: function setOptions(setOptionsAction) {
|
||||||
|
var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;
|
||||||
|
cleanupModifierEffects();
|
||||||
|
state.options = Object.assign({}, defaultOptions, state.options, options);
|
||||||
|
state.scrollParents = {
|
||||||
|
reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],
|
||||||
|
popper: listScrollParents(popper)
|
||||||
|
}; // Orders the modifiers based on their dependencies and `phase`
|
||||||
|
// properties
|
||||||
|
|
||||||
|
var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers
|
||||||
|
|
||||||
|
state.orderedModifiers = orderedModifiers.filter(function (m) {
|
||||||
|
return m.enabled;
|
||||||
|
});
|
||||||
|
runModifierEffects();
|
||||||
|
return instance.update();
|
||||||
|
},
|
||||||
|
// Sync update – it will always be executed, even if not necessary. This
|
||||||
|
// is useful for low frequency updates where sync behavior simplifies the
|
||||||
|
// logic.
|
||||||
|
// For high frequency updates (e.g. `resize` and `scroll` events), always
|
||||||
|
// prefer the async Popper#update method
|
||||||
|
forceUpdate: function forceUpdate() {
|
||||||
|
if (isDestroyed) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _state$elements = state.elements,
|
||||||
|
reference = _state$elements.reference,
|
||||||
|
popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements
|
||||||
|
// anymore
|
||||||
|
|
||||||
|
if (!areValidElements(reference, popper)) {
|
||||||
|
return;
|
||||||
|
} // Store the reference and popper rects to be read by modifiers
|
||||||
|
|
||||||
|
|
||||||
|
state.rects = {
|
||||||
|
reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),
|
||||||
|
popper: getLayoutRect(popper)
|
||||||
|
}; // Modifiers have the ability to reset the current update cycle. The
|
||||||
|
// most common use case for this is the `flip` modifier changing the
|
||||||
|
// placement, which then needs to re-run all the modifiers, because the
|
||||||
|
// logic was previously ran for the previous placement and is therefore
|
||||||
|
// stale/incorrect
|
||||||
|
|
||||||
|
state.reset = false;
|
||||||
|
state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier
|
||||||
|
// is filled with the initial data specified by the modifier. This means
|
||||||
|
// it doesn't persist and is fresh on each update.
|
||||||
|
// To ensure persistent data, use `${name}#persistent`
|
||||||
|
|
||||||
|
state.orderedModifiers.forEach(function (modifier) {
|
||||||
|
return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);
|
||||||
|
});
|
||||||
|
|
||||||
|
for (var index = 0; index < state.orderedModifiers.length; index++) {
|
||||||
|
if (state.reset === true) {
|
||||||
|
state.reset = false;
|
||||||
|
index = -1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _state$orderedModifie = state.orderedModifiers[index],
|
||||||
|
fn = _state$orderedModifie.fn,
|
||||||
|
_state$orderedModifie2 = _state$orderedModifie.options,
|
||||||
|
_options = _state$orderedModifie2 === void 0 ? {} : _state$orderedModifie2,
|
||||||
|
name = _state$orderedModifie.name;
|
||||||
|
|
||||||
|
if (typeof fn === 'function') {
|
||||||
|
state = fn({
|
||||||
|
state: state,
|
||||||
|
options: _options,
|
||||||
|
name: name,
|
||||||
|
instance: instance
|
||||||
|
}) || state;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Async and optimistically optimized update – it will not be executed if
|
||||||
|
// not necessary (debounced to run at most once-per-tick)
|
||||||
|
update: debounce(function () {
|
||||||
|
return new Promise(function (resolve) {
|
||||||
|
instance.forceUpdate();
|
||||||
|
resolve(state);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
destroy: function destroy() {
|
||||||
|
cleanupModifierEffects();
|
||||||
|
isDestroyed = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!areValidElements(reference, popper)) {
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
instance.setOptions(options).then(function (state) {
|
||||||
|
if (!isDestroyed && options.onFirstUpdate) {
|
||||||
|
options.onFirstUpdate(state);
|
||||||
|
}
|
||||||
|
}); // Modifiers have the ability to execute arbitrary code before the first
|
||||||
|
// update cycle runs. They will be executed in the same order as the update
|
||||||
|
// cycle. This is useful when a modifier adds some persistent data that
|
||||||
|
// other modifiers need to use, but the modifier is run after the dependent
|
||||||
|
// one.
|
||||||
|
|
||||||
|
function runModifierEffects() {
|
||||||
|
state.orderedModifiers.forEach(function (_ref) {
|
||||||
|
var name = _ref.name,
|
||||||
|
_ref$options = _ref.options,
|
||||||
|
options = _ref$options === void 0 ? {} : _ref$options,
|
||||||
|
effect = _ref.effect;
|
||||||
|
|
||||||
|
if (typeof effect === 'function') {
|
||||||
|
var cleanupFn = effect({
|
||||||
|
state: state,
|
||||||
|
name: name,
|
||||||
|
instance: instance,
|
||||||
|
options: options
|
||||||
|
});
|
||||||
|
|
||||||
|
var noopFn = function noopFn() {};
|
||||||
|
|
||||||
|
effectCleanupFns.push(cleanupFn || noopFn);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanupModifierEffects() {
|
||||||
|
effectCleanupFns.forEach(function (fn) {
|
||||||
|
return fn();
|
||||||
|
});
|
||||||
|
effectCleanupFns = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return instance;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
export var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules
|
||||||
|
|
||||||
|
export { detectOverflow };
|
||||||
23
node_modules/@popperjs/core/dist/esm/dom-utils/contains.js
generated
vendored
Normal file
23
node_modules/@popperjs/core/dist/esm/dom-utils/contains.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import { isShadowRoot } from "./instanceOf.js";
|
||||||
|
export default function contains(parent, child) {
|
||||||
|
var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method
|
||||||
|
|
||||||
|
if (parent.contains(child)) {
|
||||||
|
return true;
|
||||||
|
} // then fallback to custom implementation with Shadow DOM support
|
||||||
|
else if (rootNode && isShadowRoot(rootNode)) {
|
||||||
|
var next = child;
|
||||||
|
|
||||||
|
do {
|
||||||
|
if (next && parent.isSameNode(next)) {
|
||||||
|
return true;
|
||||||
|
} // $FlowFixMe[prop-missing]: need a better way to handle this...
|
||||||
|
|
||||||
|
|
||||||
|
next = next.parentNode || next.host;
|
||||||
|
} while (next);
|
||||||
|
} // Give up, the result is false
|
||||||
|
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
41
node_modules/@popperjs/core/dist/esm/dom-utils/getBoundingClientRect.js
generated
vendored
Normal file
41
node_modules/@popperjs/core/dist/esm/dom-utils/getBoundingClientRect.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import { isElement, isHTMLElement } from "./instanceOf.js";
|
||||||
|
import { round } from "../utils/math.js";
|
||||||
|
import getWindow from "./getWindow.js";
|
||||||
|
import isLayoutViewport from "./isLayoutViewport.js";
|
||||||
|
export default function getBoundingClientRect(element, includeScale, isFixedStrategy) {
|
||||||
|
if (includeScale === void 0) {
|
||||||
|
includeScale = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isFixedStrategy === void 0) {
|
||||||
|
isFixedStrategy = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
var clientRect = element.getBoundingClientRect();
|
||||||
|
var scaleX = 1;
|
||||||
|
var scaleY = 1;
|
||||||
|
|
||||||
|
if (includeScale && isHTMLElement(element)) {
|
||||||
|
scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;
|
||||||
|
scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ref = isElement(element) ? getWindow(element) : window,
|
||||||
|
visualViewport = _ref.visualViewport;
|
||||||
|
|
||||||
|
var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;
|
||||||
|
var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;
|
||||||
|
var y = (clientRect.top + (addVisualOffsets && visualViewport ? visualViewport.offsetTop : 0)) / scaleY;
|
||||||
|
var width = clientRect.width / scaleX;
|
||||||
|
var height = clientRect.height / scaleY;
|
||||||
|
return {
|
||||||
|
width: width,
|
||||||
|
height: height,
|
||||||
|
top: y,
|
||||||
|
right: x + width,
|
||||||
|
bottom: y + height,
|
||||||
|
left: x,
|
||||||
|
x: x,
|
||||||
|
y: y
|
||||||
|
};
|
||||||
|
}
|
||||||
70
node_modules/@popperjs/core/dist/esm/dom-utils/getClippingRect.js
generated
vendored
Normal file
70
node_modules/@popperjs/core/dist/esm/dom-utils/getClippingRect.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
import { viewport } from "../enums.js";
|
||||||
|
import getViewportRect from "./getViewportRect.js";
|
||||||
|
import getDocumentRect from "./getDocumentRect.js";
|
||||||
|
import listScrollParents from "./listScrollParents.js";
|
||||||
|
import getOffsetParent from "./getOffsetParent.js";
|
||||||
|
import getDocumentElement from "./getDocumentElement.js";
|
||||||
|
import getComputedStyle from "./getComputedStyle.js";
|
||||||
|
import { isElement, isHTMLElement } from "./instanceOf.js";
|
||||||
|
import getBoundingClientRect from "./getBoundingClientRect.js";
|
||||||
|
import getParentNode from "./getParentNode.js";
|
||||||
|
import contains from "./contains.js";
|
||||||
|
import getNodeName from "./getNodeName.js";
|
||||||
|
import rectToClientRect from "../utils/rectToClientRect.js";
|
||||||
|
import { max, min } from "../utils/math.js";
|
||||||
|
|
||||||
|
function getInnerBoundingClientRect(element, strategy) {
|
||||||
|
var rect = getBoundingClientRect(element, false, strategy === 'fixed');
|
||||||
|
rect.top = rect.top + element.clientTop;
|
||||||
|
rect.left = rect.left + element.clientLeft;
|
||||||
|
rect.bottom = rect.top + element.clientHeight;
|
||||||
|
rect.right = rect.left + element.clientWidth;
|
||||||
|
rect.width = element.clientWidth;
|
||||||
|
rect.height = element.clientHeight;
|
||||||
|
rect.x = rect.left;
|
||||||
|
rect.y = rect.top;
|
||||||
|
return rect;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getClientRectFromMixedType(element, clippingParent, strategy) {
|
||||||
|
return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));
|
||||||
|
} // A "clipping parent" is an overflowable container with the characteristic of
|
||||||
|
// clipping (or hiding) overflowing elements with a position different from
|
||||||
|
// `initial`
|
||||||
|
|
||||||
|
|
||||||
|
function getClippingParents(element) {
|
||||||
|
var clippingParents = listScrollParents(getParentNode(element));
|
||||||
|
var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;
|
||||||
|
var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;
|
||||||
|
|
||||||
|
if (!isElement(clipperElement)) {
|
||||||
|
return [];
|
||||||
|
} // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414
|
||||||
|
|
||||||
|
|
||||||
|
return clippingParents.filter(function (clippingParent) {
|
||||||
|
return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';
|
||||||
|
});
|
||||||
|
} // Gets the maximum area that the element is visible in due to any number of
|
||||||
|
// clipping parents
|
||||||
|
|
||||||
|
|
||||||
|
export default function getClippingRect(element, boundary, rootBoundary, strategy) {
|
||||||
|
var mainClippingParents = boundary === 'clippingParents' ? getClippingParents(element) : [].concat(boundary);
|
||||||
|
var clippingParents = [].concat(mainClippingParents, [rootBoundary]);
|
||||||
|
var firstClippingParent = clippingParents[0];
|
||||||
|
var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {
|
||||||
|
var rect = getClientRectFromMixedType(element, clippingParent, strategy);
|
||||||
|
accRect.top = max(rect.top, accRect.top);
|
||||||
|
accRect.right = min(rect.right, accRect.right);
|
||||||
|
accRect.bottom = min(rect.bottom, accRect.bottom);
|
||||||
|
accRect.left = max(rect.left, accRect.left);
|
||||||
|
return accRect;
|
||||||
|
}, getClientRectFromMixedType(element, firstClippingParent, strategy));
|
||||||
|
clippingRect.width = clippingRect.right - clippingRect.left;
|
||||||
|
clippingRect.height = clippingRect.bottom - clippingRect.top;
|
||||||
|
clippingRect.x = clippingRect.left;
|
||||||
|
clippingRect.y = clippingRect.top;
|
||||||
|
return clippingRect;
|
||||||
|
}
|
||||||
58
node_modules/@popperjs/core/dist/esm/dom-utils/getCompositeRect.js
generated
vendored
Normal file
58
node_modules/@popperjs/core/dist/esm/dom-utils/getCompositeRect.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import getBoundingClientRect from "./getBoundingClientRect.js";
|
||||||
|
import getNodeScroll from "./getNodeScroll.js";
|
||||||
|
import getNodeName from "./getNodeName.js";
|
||||||
|
import { isHTMLElement } from "./instanceOf.js";
|
||||||
|
import getWindowScrollBarX from "./getWindowScrollBarX.js";
|
||||||
|
import getDocumentElement from "./getDocumentElement.js";
|
||||||
|
import isScrollParent from "./isScrollParent.js";
|
||||||
|
import { round } from "../utils/math.js";
|
||||||
|
|
||||||
|
function isElementScaled(element) {
|
||||||
|
var rect = element.getBoundingClientRect();
|
||||||
|
var scaleX = round(rect.width) / element.offsetWidth || 1;
|
||||||
|
var scaleY = round(rect.height) / element.offsetHeight || 1;
|
||||||
|
return scaleX !== 1 || scaleY !== 1;
|
||||||
|
} // Returns the composite rect of an element relative to its offsetParent.
|
||||||
|
// Composite means it takes into account transforms as well as layout.
|
||||||
|
|
||||||
|
|
||||||
|
export default function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {
|
||||||
|
if (isFixed === void 0) {
|
||||||
|
isFixed = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
var isOffsetParentAnElement = isHTMLElement(offsetParent);
|
||||||
|
var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);
|
||||||
|
var documentElement = getDocumentElement(offsetParent);
|
||||||
|
var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);
|
||||||
|
var scroll = {
|
||||||
|
scrollLeft: 0,
|
||||||
|
scrollTop: 0
|
||||||
|
};
|
||||||
|
var offsets = {
|
||||||
|
x: 0,
|
||||||
|
y: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {
|
||||||
|
if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078
|
||||||
|
isScrollParent(documentElement)) {
|
||||||
|
scroll = getNodeScroll(offsetParent);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isHTMLElement(offsetParent)) {
|
||||||
|
offsets = getBoundingClientRect(offsetParent, true);
|
||||||
|
offsets.x += offsetParent.clientLeft;
|
||||||
|
offsets.y += offsetParent.clientTop;
|
||||||
|
} else if (documentElement) {
|
||||||
|
offsets.x = getWindowScrollBarX(documentElement);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
x: rect.left + scroll.scrollLeft - offsets.x,
|
||||||
|
y: rect.top + scroll.scrollTop - offsets.y,
|
||||||
|
width: rect.width,
|
||||||
|
height: rect.height
|
||||||
|
};
|
||||||
|
}
|
||||||
4
node_modules/@popperjs/core/dist/esm/dom-utils/getComputedStyle.js
generated
vendored
Normal file
4
node_modules/@popperjs/core/dist/esm/dom-utils/getComputedStyle.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import getWindow from "./getWindow.js";
|
||||||
|
export default function getComputedStyle(element) {
|
||||||
|
return getWindow(element).getComputedStyle(element);
|
||||||
|
}
|
||||||
6
node_modules/@popperjs/core/dist/esm/dom-utils/getDocumentElement.js
generated
vendored
Normal file
6
node_modules/@popperjs/core/dist/esm/dom-utils/getDocumentElement.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { isElement } from "./instanceOf.js";
|
||||||
|
export default function getDocumentElement(element) {
|
||||||
|
// $FlowFixMe[incompatible-return]: assume body is always available
|
||||||
|
return ((isElement(element) ? element.ownerDocument : // $FlowFixMe[prop-missing]
|
||||||
|
element.document) || window.document).documentElement;
|
||||||
|
}
|
||||||
29
node_modules/@popperjs/core/dist/esm/dom-utils/getDocumentRect.js
generated
vendored
Normal file
29
node_modules/@popperjs/core/dist/esm/dom-utils/getDocumentRect.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import getDocumentElement from "./getDocumentElement.js";
|
||||||
|
import getComputedStyle from "./getComputedStyle.js";
|
||||||
|
import getWindowScrollBarX from "./getWindowScrollBarX.js";
|
||||||
|
import getWindowScroll from "./getWindowScroll.js";
|
||||||
|
import { max } from "../utils/math.js"; // Gets the entire size of the scrollable document area, even extending outside
|
||||||
|
// of the `<html>` and `<body>` rect bounds if horizontally scrollable
|
||||||
|
|
||||||
|
export default function getDocumentRect(element) {
|
||||||
|
var _element$ownerDocumen;
|
||||||
|
|
||||||
|
var html = getDocumentElement(element);
|
||||||
|
var winScroll = getWindowScroll(element);
|
||||||
|
var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;
|
||||||
|
var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);
|
||||||
|
var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);
|
||||||
|
var x = -winScroll.scrollLeft + getWindowScrollBarX(element);
|
||||||
|
var y = -winScroll.scrollTop;
|
||||||
|
|
||||||
|
if (getComputedStyle(body || html).direction === 'rtl') {
|
||||||
|
x += max(html.clientWidth, body ? body.clientWidth : 0) - width;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
width: width,
|
||||||
|
height: height,
|
||||||
|
x: x,
|
||||||
|
y: y
|
||||||
|
};
|
||||||
|
}
|
||||||
6
node_modules/@popperjs/core/dist/esm/dom-utils/getHTMLElementScroll.js
generated
vendored
Normal file
6
node_modules/@popperjs/core/dist/esm/dom-utils/getHTMLElementScroll.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
export default function getHTMLElementScroll(element) {
|
||||||
|
return {
|
||||||
|
scrollLeft: element.scrollLeft,
|
||||||
|
scrollTop: element.scrollTop
|
||||||
|
};
|
||||||
|
}
|
||||||
25 node_modules/@popperjs/core/dist/esm/dom-utils/getLayoutRect.js generated vendored Normal file
@@ -0,0 +1,25 @@
import getBoundingClientRect from "./getBoundingClientRect.js"; // Returns the layout rect of an element relative to its offsetParent. Layout
// means it doesn't take into account transforms.

export default function getLayoutRect(element) {
  var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.
  // Fixes https://github.com/popperjs/popper-core/issues/1223

  var width = element.offsetWidth;
  var height = element.offsetHeight;

  if (Math.abs(clientRect.width - width) <= 1) {
    width = clientRect.width;
  }

  if (Math.abs(clientRect.height - height) <= 1) {
    height = clientRect.height;
  }

  return {
    x: element.offsetLeft,
    y: element.offsetTop,
    width: width,
    height: height
  };
}
3 node_modules/@popperjs/core/dist/esm/dom-utils/getNodeName.js generated vendored Normal file
@@ -0,0 +1,3 @@
export default function getNodeName(element) {
  return element ? (element.nodeName || '').toLowerCase() : null;
}
11 node_modules/@popperjs/core/dist/esm/dom-utils/getNodeScroll.js generated vendored Normal file
@@ -0,0 +1,11 @@
import getWindowScroll from "./getWindowScroll.js";
import getWindow from "./getWindow.js";
import { isHTMLElement } from "./instanceOf.js";
import getHTMLElementScroll from "./getHTMLElementScroll.js";
export default function getNodeScroll(node) {
  if (node === getWindow(node) || !isHTMLElement(node)) {
    return getWindowScroll(node);
  } else {
    return getHTMLElementScroll(node);
  }
}
69 node_modules/@popperjs/core/dist/esm/dom-utils/getOffsetParent.js generated vendored Normal file
@@ -0,0 +1,69 @@
import getWindow from "./getWindow.js";
import getNodeName from "./getNodeName.js";
import getComputedStyle from "./getComputedStyle.js";
import { isHTMLElement, isShadowRoot } from "./instanceOf.js";
import isTableElement from "./isTableElement.js";
import getParentNode from "./getParentNode.js";
import getUAString from "../utils/userAgent.js";

function getTrueOffsetParent(element) {
  if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837
  getComputedStyle(element).position === 'fixed') {
    return null;
  }

  return element.offsetParent;
} // `.offsetParent` reports `null` for fixed elements, while absolute elements
// return the containing block


function getContainingBlock(element) {
  var isFirefox = /firefox/i.test(getUAString());
  var isIE = /Trident/i.test(getUAString());

  if (isIE && isHTMLElement(element)) {
    // In IE 9, 10 and 11 fixed elements containing block is always established by the viewport
    var elementCss = getComputedStyle(element);

    if (elementCss.position === 'fixed') {
      return null;
    }
  }

  var currentNode = getParentNode(element);

  if (isShadowRoot(currentNode)) {
    currentNode = currentNode.host;
  }

  while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {
    var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that
    // create a containing block.
    // https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block

    if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {
      return currentNode;
    } else {
      currentNode = currentNode.parentNode;
    }
  }

  return null;
} // Gets the closest ancestor positioned element. Handles some edge cases,
// such as table ancestors and cross browser bugs.


export default function getOffsetParent(element) {
  var window = getWindow(element);
  var offsetParent = getTrueOffsetParent(element);

  while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {
    offsetParent = getTrueOffsetParent(offsetParent);
  }

  if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {
    return window;
  }

  return offsetParent || getContainingBlock(element) || window;
}
19 node_modules/@popperjs/core/dist/esm/dom-utils/getParentNode.js generated vendored Normal file
@@ -0,0 +1,19 @@
import getNodeName from "./getNodeName.js";
import getDocumentElement from "./getDocumentElement.js";
import { isShadowRoot } from "./instanceOf.js";
export default function getParentNode(element) {
  if (getNodeName(element) === 'html') {
    return element;
  }

  return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle
    // $FlowFixMe[incompatible-return]
    // $FlowFixMe[prop-missing]
    element.assignedSlot || // step into the shadow DOM of the parent of a slotted node
    element.parentNode || ( // DOM Element detected
    isShadowRoot(element) ? element.host : null) || // ShadowRoot detected
    // $FlowFixMe[incompatible-call]: HTMLElement is a Node
    getDocumentElement(element) // fallback

  );
}
16 node_modules/@popperjs/core/dist/esm/dom-utils/getScrollParent.js generated vendored Normal file
@@ -0,0 +1,16 @@
import getParentNode from "./getParentNode.js";
import isScrollParent from "./isScrollParent.js";
import getNodeName from "./getNodeName.js";
import { isHTMLElement } from "./instanceOf.js";
export default function getScrollParent(node) {
  if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {
    // $FlowFixMe[incompatible-return]: assume body is always available
    return node.ownerDocument.body;
  }

  if (isHTMLElement(node) && isScrollParent(node)) {
    return node;
  }

  return getScrollParent(getParentNode(node));
}
31 node_modules/@popperjs/core/dist/esm/dom-utils/getViewportRect.js generated vendored Normal file
@@ -0,0 +1,31 @@
import getWindow from "./getWindow.js";
import getDocumentElement from "./getDocumentElement.js";
import getWindowScrollBarX from "./getWindowScrollBarX.js";
import isLayoutViewport from "./isLayoutViewport.js";
export default function getViewportRect(element, strategy) {
  var win = getWindow(element);
  var html = getDocumentElement(element);
  var visualViewport = win.visualViewport;
  var width = html.clientWidth;
  var height = html.clientHeight;
  var x = 0;
  var y = 0;

  if (visualViewport) {
    width = visualViewport.width;
    height = visualViewport.height;
    var layoutViewport = isLayoutViewport();

    if (layoutViewport || !layoutViewport && strategy === 'fixed') {
      x = visualViewport.offsetLeft;
      y = visualViewport.offsetTop;
    }
  }

  return {
    width: width,
    height: height,
    x: x + getWindowScrollBarX(element),
    y: y
  };
}
12 node_modules/@popperjs/core/dist/esm/dom-utils/getWindow.js generated vendored Normal file
@@ -0,0 +1,12 @@
export default function getWindow(node) {
  if (node == null) {
    return window;
  }

  if (node.toString() !== '[object Window]') {
    var ownerDocument = node.ownerDocument;
    return ownerDocument ? ownerDocument.defaultView || window : window;
  }

  return node;
}
10 node_modules/@popperjs/core/dist/esm/dom-utils/getWindowScroll.js generated vendored Normal file
@@ -0,0 +1,10 @@
import getWindow from "./getWindow.js";
export default function getWindowScroll(node) {
  var win = getWindow(node);
  var scrollLeft = win.pageXOffset;
  var scrollTop = win.pageYOffset;
  return {
    scrollLeft: scrollLeft,
    scrollTop: scrollTop
  };
}
13 node_modules/@popperjs/core/dist/esm/dom-utils/getWindowScrollBarX.js generated vendored Normal file
@@ -0,0 +1,13 @@
import getBoundingClientRect from "./getBoundingClientRect.js";
import getDocumentElement from "./getDocumentElement.js";
import getWindowScroll from "./getWindowScroll.js";
export default function getWindowScrollBarX(element) {
  // If <html> has a CSS width greater than the viewport, then this will be
  // incorrect for RTL.
  // Popper 1 is broken in this case and never had a bug report so let's assume
  // it's not an issue. I don't think anyone ever specifies width on <html>
  // anyway.
  // Browsers where the left scrollbar doesn't cause an issue report `0` for
  // this (e.g. Edge 2019, IE11, Safari)
  return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;
}
23 node_modules/@popperjs/core/dist/esm/dom-utils/instanceOf.js generated vendored Normal file
@@ -0,0 +1,23 @@
import getWindow from "./getWindow.js";

function isElement(node) {
  var OwnElement = getWindow(node).Element;
  return node instanceof OwnElement || node instanceof Element;
}

function isHTMLElement(node) {
  var OwnElement = getWindow(node).HTMLElement;
  return node instanceof OwnElement || node instanceof HTMLElement;
}

function isShadowRoot(node) {
  // IE 11 has no ShadowRoot
  if (typeof ShadowRoot === 'undefined') {
    return false;
  }

  var OwnElement = getWindow(node).ShadowRoot;
  return node instanceof OwnElement || node instanceof ShadowRoot;
}

export { isElement, isHTMLElement, isShadowRoot };
4 node_modules/@popperjs/core/dist/esm/dom-utils/isLayoutViewport.js generated vendored Normal file
@@ -0,0 +1,4 @@
import getUAString from "../utils/userAgent.js";
export default function isLayoutViewport() {
  return !/^((?!chrome|android).)*safari/i.test(getUAString());
}
10 node_modules/@popperjs/core/dist/esm/dom-utils/isScrollParent.js generated vendored Normal file
@@ -0,0 +1,10 @@
import getComputedStyle from "./getComputedStyle.js";
export default function isScrollParent(element) {
  // Firefox wants us to check `-x` and `-y` variations as well
  var _getComputedStyle = getComputedStyle(element),
      overflow = _getComputedStyle.overflow,
      overflowX = _getComputedStyle.overflowX,
      overflowY = _getComputedStyle.overflowY;

  return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);
}
4 node_modules/@popperjs/core/dist/esm/dom-utils/isTableElement.js generated vendored Normal file
@@ -0,0 +1,4 @@
import getNodeName from "./getNodeName.js";
export default function isTableElement(element) {
  return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;
}
26 node_modules/@popperjs/core/dist/esm/dom-utils/listScrollParents.js generated vendored Normal file
@@ -0,0 +1,26 @@
import getScrollParent from "./getScrollParent.js";
import getParentNode from "./getParentNode.js";
import getWindow from "./getWindow.js";
import isScrollParent from "./isScrollParent.js";
/*
given a DOM element, return the list of all scroll parents, up the list of ancesors
until we get to the top window object. This list is what we attach scroll listeners
to, because if any of these parent elements scroll, we'll need to re-calculate the
reference element's position.
*/

export default function listScrollParents(element, list) {
  var _element$ownerDocumen;

  if (list === void 0) {
    list = [];
  }

  var scrollParent = getScrollParent(element);
  var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);
  var win = getWindow(scrollParent);
  var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;
  var updatedList = list.concat(target);
  return isBody ? updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here
  updatedList.concat(listScrollParents(getParentNode(target)));
}
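Editor's note, not part of this merge request: a minimal, hedged sketch of how the scroll-parent list returned by listScrollParents above is typically consumed. The watchScrollParents helper and updatePosition callback are hypothetical names; the deep import path assumes a bundler that resolves the dist ESM files. This mirrors what the eventListeners modifier further down in this diff does internally.

// Illustration only (hypothetical helper, not part of @popperjs/core): attach a
// reposition callback to every scroll parent of a reference element.
import listScrollParents from "@popperjs/core/dist/esm/dom-utils/listScrollParents.js";

function watchScrollParents(referenceElement, updatePosition) {
  // Every scrollable ancestor (plus the window) can move the reference element.
  var scrollParents = listScrollParents(referenceElement);
  scrollParents.forEach(function (parent) {
    parent.addEventListener("scroll", updatePosition, { passive: true });
  });
  // Return a cleanup function that detaches the listeners again.
  return function () {
    scrollParents.forEach(function (parent) {
      parent.removeEventListener("scroll", updatePosition);
    });
  };
}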
31 node_modules/@popperjs/core/dist/esm/enums.js generated vendored Normal file
@@ -0,0 +1,31 @@
export var top = 'top';
export var bottom = 'bottom';
export var right = 'right';
export var left = 'left';
export var auto = 'auto';
export var basePlacements = [top, bottom, right, left];
export var start = 'start';
export var end = 'end';
export var clippingParents = 'clippingParents';
export var viewport = 'viewport';
export var popper = 'popper';
export var reference = 'reference';
export var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {
  return acc.concat([placement + "-" + start, placement + "-" + end]);
}, []);
export var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {
  return acc.concat([placement, placement + "-" + start, placement + "-" + end]);
}, []); // modifiers that need to read the DOM

export var beforeRead = 'beforeRead';
export var read = 'read';
export var afterRead = 'afterRead'; // pure-logic modifiers

export var beforeMain = 'beforeMain';
export var main = 'main';
export var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)

export var beforeWrite = 'beforeWrite';
export var write = 'write';
export var afterWrite = 'afterWrite';
export var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];
8 node_modules/@popperjs/core/dist/esm/index.js generated vendored Normal file
@@ -0,0 +1,8 @@
export * from "./enums.js";
export * from "./modifiers/index.js"; // eslint-disable-next-line import/no-unused-modules

export { popperGenerator, detectOverflow, createPopper as createPopperBase } from "./createPopper.js"; // eslint-disable-next-line import/no-unused-modules

export { createPopper } from "./popper.js"; // eslint-disable-next-line import/no-unused-modules

export { createPopper as createPopperLite } from "./popper-lite.js";
84 node_modules/@popperjs/core/dist/esm/modifiers/applyStyles.js generated vendored Normal file
@@ -0,0 +1,84 @@
import getNodeName from "../dom-utils/getNodeName.js";
import { isHTMLElement } from "../dom-utils/instanceOf.js"; // This modifier takes the styles prepared by the `computeStyles` modifier
// and applies them to the HTMLElements such as popper and arrow

function applyStyles(_ref) {
  var state = _ref.state;
  Object.keys(state.elements).forEach(function (name) {
    var style = state.styles[name] || {};
    var attributes = state.attributes[name] || {};
    var element = state.elements[name]; // arrow is optional + virtual elements

    if (!isHTMLElement(element) || !getNodeName(element)) {
      return;
    } // Flow doesn't support to extend this property, but it's the most
    // effective way to apply styles to an HTMLElement
    // $FlowFixMe[cannot-write]


    Object.assign(element.style, style);
    Object.keys(attributes).forEach(function (name) {
      var value = attributes[name];

      if (value === false) {
        element.removeAttribute(name);
      } else {
        element.setAttribute(name, value === true ? '' : value);
      }
    });
  });
}

function effect(_ref2) {
  var state = _ref2.state;
  var initialStyles = {
    popper: {
      position: state.options.strategy,
      left: '0',
      top: '0',
      margin: '0'
    },
    arrow: {
      position: 'absolute'
    },
    reference: {}
  };
  Object.assign(state.elements.popper.style, initialStyles.popper);
  state.styles = initialStyles;

  if (state.elements.arrow) {
    Object.assign(state.elements.arrow.style, initialStyles.arrow);
  }

  return function () {
    Object.keys(state.elements).forEach(function (name) {
      var element = state.elements[name];
      var attributes = state.attributes[name] || {};
      var styleProperties = Object.keys(state.styles.hasOwnProperty(name) ? state.styles[name] : initialStyles[name]); // Set all values to an empty string to unset them

      var style = styleProperties.reduce(function (style, property) {
        style[property] = '';
        return style;
      }, {}); // arrow is optional + virtual elements

      if (!isHTMLElement(element) || !getNodeName(element)) {
        return;
      }

      Object.assign(element.style, style);
      Object.keys(attributes).forEach(function (attribute) {
        element.removeAttribute(attribute);
      });
    });
  };
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'applyStyles',
  enabled: true,
  phase: 'write',
  fn: applyStyles,
  effect: effect,
  requires: ['computeStyles']
};
90 node_modules/@popperjs/core/dist/esm/modifiers/arrow.js generated vendored Normal file
@@ -0,0 +1,90 @@
import getBasePlacement from "../utils/getBasePlacement.js";
import getLayoutRect from "../dom-utils/getLayoutRect.js";
import contains from "../dom-utils/contains.js";
import getOffsetParent from "../dom-utils/getOffsetParent.js";
import getMainAxisFromPlacement from "../utils/getMainAxisFromPlacement.js";
import { within } from "../utils/within.js";
import mergePaddingObject from "../utils/mergePaddingObject.js";
import expandToHashMap from "../utils/expandToHashMap.js";
import { left, right, basePlacements, top, bottom } from "../enums.js"; // eslint-disable-next-line import/no-unused-modules

var toPaddingObject = function toPaddingObject(padding, state) {
  padding = typeof padding === 'function' ? padding(Object.assign({}, state.rects, {
    placement: state.placement
  })) : padding;
  return mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));
};

function arrow(_ref) {
  var _state$modifiersData$;

  var state = _ref.state,
      name = _ref.name,
      options = _ref.options;
  var arrowElement = state.elements.arrow;
  var popperOffsets = state.modifiersData.popperOffsets;
  var basePlacement = getBasePlacement(state.placement);
  var axis = getMainAxisFromPlacement(basePlacement);
  var isVertical = [left, right].indexOf(basePlacement) >= 0;
  var len = isVertical ? 'height' : 'width';

  if (!arrowElement || !popperOffsets) {
    return;
  }

  var paddingObject = toPaddingObject(options.padding, state);
  var arrowRect = getLayoutRect(arrowElement);
  var minProp = axis === 'y' ? top : left;
  var maxProp = axis === 'y' ? bottom : right;
  var endDiff = state.rects.reference[len] + state.rects.reference[axis] - popperOffsets[axis] - state.rects.popper[len];
  var startDiff = popperOffsets[axis] - state.rects.reference[axis];
  var arrowOffsetParent = getOffsetParent(arrowElement);
  var clientSize = arrowOffsetParent ? axis === 'y' ? arrowOffsetParent.clientHeight || 0 : arrowOffsetParent.clientWidth || 0 : 0;
  var centerToReference = endDiff / 2 - startDiff / 2; // Make sure the arrow doesn't overflow the popper if the center point is
  // outside of the popper bounds

  var min = paddingObject[minProp];
  var max = clientSize - arrowRect[len] - paddingObject[maxProp];
  var center = clientSize / 2 - arrowRect[len] / 2 + centerToReference;
  var offset = within(min, center, max); // Prevents breaking syntax highlighting...

  var axisProp = axis;
  state.modifiersData[name] = (_state$modifiersData$ = {}, _state$modifiersData$[axisProp] = offset, _state$modifiersData$.centerOffset = offset - center, _state$modifiersData$);
}

function effect(_ref2) {
  var state = _ref2.state,
      options = _ref2.options;
  var _options$element = options.element,
      arrowElement = _options$element === void 0 ? '[data-popper-arrow]' : _options$element;

  if (arrowElement == null) {
    return;
  } // CSS selector


  if (typeof arrowElement === 'string') {
    arrowElement = state.elements.popper.querySelector(arrowElement);

    if (!arrowElement) {
      return;
    }
  }

  if (!contains(state.elements.popper, arrowElement)) {
    return;
  }

  state.elements.arrow = arrowElement;
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'arrow',
  enabled: true,
  phase: 'main',
  fn: arrow,
  effect: effect,
  requires: ['popperOffsets'],
  requiresIfExists: ['preventOverflow']
};
169 node_modules/@popperjs/core/dist/esm/modifiers/computeStyles.js generated vendored Normal file
@@ -0,0 +1,169 @@
import { top, left, right, bottom, end } from "../enums.js";
import getOffsetParent from "../dom-utils/getOffsetParent.js";
import getWindow from "../dom-utils/getWindow.js";
import getDocumentElement from "../dom-utils/getDocumentElement.js";
import getComputedStyle from "../dom-utils/getComputedStyle.js";
import getBasePlacement from "../utils/getBasePlacement.js";
import getVariation from "../utils/getVariation.js";
import { round } from "../utils/math.js"; // eslint-disable-next-line import/no-unused-modules

var unsetSides = {
  top: 'auto',
  right: 'auto',
  bottom: 'auto',
  left: 'auto'
}; // Round the offsets to the nearest suitable subpixel based on the DPR.
// Zooming can change the DPR, but it seems to report a value that will
// cleanly divide the values into the appropriate subpixels.

function roundOffsetsByDPR(_ref, win) {
  var x = _ref.x,
      y = _ref.y;
  var dpr = win.devicePixelRatio || 1;
  return {
    x: round(x * dpr) / dpr || 0,
    y: round(y * dpr) / dpr || 0
  };
}

export function mapToStyles(_ref2) {
  var _Object$assign2;

  var popper = _ref2.popper,
      popperRect = _ref2.popperRect,
      placement = _ref2.placement,
      variation = _ref2.variation,
      offsets = _ref2.offsets,
      position = _ref2.position,
      gpuAcceleration = _ref2.gpuAcceleration,
      adaptive = _ref2.adaptive,
      roundOffsets = _ref2.roundOffsets,
      isFixed = _ref2.isFixed;
  var _offsets$x = offsets.x,
      x = _offsets$x === void 0 ? 0 : _offsets$x,
      _offsets$y = offsets.y,
      y = _offsets$y === void 0 ? 0 : _offsets$y;

  var _ref3 = typeof roundOffsets === 'function' ? roundOffsets({
    x: x,
    y: y
  }) : {
    x: x,
    y: y
  };

  x = _ref3.x;
  y = _ref3.y;
  var hasX = offsets.hasOwnProperty('x');
  var hasY = offsets.hasOwnProperty('y');
  var sideX = left;
  var sideY = top;
  var win = window;

  if (adaptive) {
    var offsetParent = getOffsetParent(popper);
    var heightProp = 'clientHeight';
    var widthProp = 'clientWidth';

    if (offsetParent === getWindow(popper)) {
      offsetParent = getDocumentElement(popper);

      if (getComputedStyle(offsetParent).position !== 'static' && position === 'absolute') {
        heightProp = 'scrollHeight';
        widthProp = 'scrollWidth';
      }
    } // $FlowFixMe[incompatible-cast]: force type refinement, we compare offsetParent with window above, but Flow doesn't detect it


    offsetParent = offsetParent;

    if (placement === top || (placement === left || placement === right) && variation === end) {
      sideY = bottom;
      var offsetY = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.height : // $FlowFixMe[prop-missing]
      offsetParent[heightProp];
      y -= offsetY - popperRect.height;
      y *= gpuAcceleration ? 1 : -1;
    }

    if (placement === left || (placement === top || placement === bottom) && variation === end) {
      sideX = right;
      var offsetX = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.width : // $FlowFixMe[prop-missing]
      offsetParent[widthProp];
      x -= offsetX - popperRect.width;
      x *= gpuAcceleration ? 1 : -1;
    }
  }

  var commonStyles = Object.assign({
    position: position
  }, adaptive && unsetSides);

  var _ref4 = roundOffsets === true ? roundOffsetsByDPR({
    x: x,
    y: y
  }, getWindow(popper)) : {
    x: x,
    y: y
  };

  x = _ref4.x;
  y = _ref4.y;

  if (gpuAcceleration) {
    var _Object$assign;

    return Object.assign({}, commonStyles, (_Object$assign = {}, _Object$assign[sideY] = hasY ? '0' : '', _Object$assign[sideX] = hasX ? '0' : '', _Object$assign.transform = (win.devicePixelRatio || 1) <= 1 ? "translate(" + x + "px, " + y + "px)" : "translate3d(" + x + "px, " + y + "px, 0)", _Object$assign));
  }

  return Object.assign({}, commonStyles, (_Object$assign2 = {}, _Object$assign2[sideY] = hasY ? y + "px" : '', _Object$assign2[sideX] = hasX ? x + "px" : '', _Object$assign2.transform = '', _Object$assign2));
}

function computeStyles(_ref5) {
  var state = _ref5.state,
      options = _ref5.options;
  var _options$gpuAccelerat = options.gpuAcceleration,
      gpuAcceleration = _options$gpuAccelerat === void 0 ? true : _options$gpuAccelerat,
      _options$adaptive = options.adaptive,
      adaptive = _options$adaptive === void 0 ? true : _options$adaptive,
      _options$roundOffsets = options.roundOffsets,
      roundOffsets = _options$roundOffsets === void 0 ? true : _options$roundOffsets;
  var commonStyles = {
    placement: getBasePlacement(state.placement),
    variation: getVariation(state.placement),
    popper: state.elements.popper,
    popperRect: state.rects.popper,
    gpuAcceleration: gpuAcceleration,
    isFixed: state.options.strategy === 'fixed'
  };

  if (state.modifiersData.popperOffsets != null) {
    state.styles.popper = Object.assign({}, state.styles.popper, mapToStyles(Object.assign({}, commonStyles, {
      offsets: state.modifiersData.popperOffsets,
      position: state.options.strategy,
      adaptive: adaptive,
      roundOffsets: roundOffsets
    })));
  }

  if (state.modifiersData.arrow != null) {
    state.styles.arrow = Object.assign({}, state.styles.arrow, mapToStyles(Object.assign({}, commonStyles, {
      offsets: state.modifiersData.arrow,
      position: 'absolute',
      adaptive: false,
      roundOffsets: roundOffsets
    })));
  }

  state.attributes.popper = Object.assign({}, state.attributes.popper, {
    'data-popper-placement': state.placement
  });
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'computeStyles',
  enabled: true,
  phase: 'beforeWrite',
  fn: computeStyles,
  data: {}
};
49 node_modules/@popperjs/core/dist/esm/modifiers/eventListeners.js generated vendored Normal file
@@ -0,0 +1,49 @@
import getWindow from "../dom-utils/getWindow.js"; // eslint-disable-next-line import/no-unused-modules

var passive = {
  passive: true
};

function effect(_ref) {
  var state = _ref.state,
      instance = _ref.instance,
      options = _ref.options;
  var _options$scroll = options.scroll,
      scroll = _options$scroll === void 0 ? true : _options$scroll,
      _options$resize = options.resize,
      resize = _options$resize === void 0 ? true : _options$resize;
  var window = getWindow(state.elements.popper);
  var scrollParents = [].concat(state.scrollParents.reference, state.scrollParents.popper);

  if (scroll) {
    scrollParents.forEach(function (scrollParent) {
      scrollParent.addEventListener('scroll', instance.update, passive);
    });
  }

  if (resize) {
    window.addEventListener('resize', instance.update, passive);
  }

  return function () {
    if (scroll) {
      scrollParents.forEach(function (scrollParent) {
        scrollParent.removeEventListener('scroll', instance.update, passive);
      });
    }

    if (resize) {
      window.removeEventListener('resize', instance.update, passive);
    }
  };
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'eventListeners',
  enabled: true,
  phase: 'write',
  fn: function fn() {},
  effect: effect,
  data: {}
};
147 node_modules/@popperjs/core/dist/esm/modifiers/flip.js generated vendored Normal file
@@ -0,0 +1,147 @@
import getOppositePlacement from "../utils/getOppositePlacement.js";
import getBasePlacement from "../utils/getBasePlacement.js";
import getOppositeVariationPlacement from "../utils/getOppositeVariationPlacement.js";
import detectOverflow from "../utils/detectOverflow.js";
import computeAutoPlacement from "../utils/computeAutoPlacement.js";
import { bottom, top, start, right, left, auto } from "../enums.js";
import getVariation from "../utils/getVariation.js"; // eslint-disable-next-line import/no-unused-modules

function getExpandedFallbackPlacements(placement) {
  if (getBasePlacement(placement) === auto) {
    return [];
  }

  var oppositePlacement = getOppositePlacement(placement);
  return [getOppositeVariationPlacement(placement), oppositePlacement, getOppositeVariationPlacement(oppositePlacement)];
}

function flip(_ref) {
  var state = _ref.state,
      options = _ref.options,
      name = _ref.name;

  if (state.modifiersData[name]._skip) {
    return;
  }

  var _options$mainAxis = options.mainAxis,
      checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,
      _options$altAxis = options.altAxis,
      checkAltAxis = _options$altAxis === void 0 ? true : _options$altAxis,
      specifiedFallbackPlacements = options.fallbackPlacements,
      padding = options.padding,
      boundary = options.boundary,
      rootBoundary = options.rootBoundary,
      altBoundary = options.altBoundary,
      _options$flipVariatio = options.flipVariations,
      flipVariations = _options$flipVariatio === void 0 ? true : _options$flipVariatio,
      allowedAutoPlacements = options.allowedAutoPlacements;
  var preferredPlacement = state.options.placement;
  var basePlacement = getBasePlacement(preferredPlacement);
  var isBasePlacement = basePlacement === preferredPlacement;
  var fallbackPlacements = specifiedFallbackPlacements || (isBasePlacement || !flipVariations ? [getOppositePlacement(preferredPlacement)] : getExpandedFallbackPlacements(preferredPlacement));
  var placements = [preferredPlacement].concat(fallbackPlacements).reduce(function (acc, placement) {
    return acc.concat(getBasePlacement(placement) === auto ? computeAutoPlacement(state, {
      placement: placement,
      boundary: boundary,
      rootBoundary: rootBoundary,
      padding: padding,
      flipVariations: flipVariations,
      allowedAutoPlacements: allowedAutoPlacements
    }) : placement);
  }, []);
  var referenceRect = state.rects.reference;
  var popperRect = state.rects.popper;
  var checksMap = new Map();
  var makeFallbackChecks = true;
  var firstFittingPlacement = placements[0];

  for (var i = 0; i < placements.length; i++) {
    var placement = placements[i];

    var _basePlacement = getBasePlacement(placement);

    var isStartVariation = getVariation(placement) === start;
    var isVertical = [top, bottom].indexOf(_basePlacement) >= 0;
    var len = isVertical ? 'width' : 'height';
    var overflow = detectOverflow(state, {
      placement: placement,
      boundary: boundary,
      rootBoundary: rootBoundary,
      altBoundary: altBoundary,
      padding: padding
    });
    var mainVariationSide = isVertical ? isStartVariation ? right : left : isStartVariation ? bottom : top;

    if (referenceRect[len] > popperRect[len]) {
      mainVariationSide = getOppositePlacement(mainVariationSide);
    }

    var altVariationSide = getOppositePlacement(mainVariationSide);
    var checks = [];

    if (checkMainAxis) {
      checks.push(overflow[_basePlacement] <= 0);
    }

    if (checkAltAxis) {
      checks.push(overflow[mainVariationSide] <= 0, overflow[altVariationSide] <= 0);
    }

    if (checks.every(function (check) {
      return check;
    })) {
      firstFittingPlacement = placement;
      makeFallbackChecks = false;
      break;
    }

    checksMap.set(placement, checks);
  }

  if (makeFallbackChecks) {
    // `2` may be desired in some cases – research later
    var numberOfChecks = flipVariations ? 3 : 1;

    var _loop = function _loop(_i) {
      var fittingPlacement = placements.find(function (placement) {
        var checks = checksMap.get(placement);

        if (checks) {
          return checks.slice(0, _i).every(function (check) {
            return check;
          });
        }
      });

      if (fittingPlacement) {
        firstFittingPlacement = fittingPlacement;
        return "break";
      }
    };

    for (var _i = numberOfChecks; _i > 0; _i--) {
      var _ret = _loop(_i);

      if (_ret === "break") break;
    }
  }

  if (state.placement !== firstFittingPlacement) {
    state.modifiersData[name]._skip = true;
    state.placement = firstFittingPlacement;
    state.reset = true;
  }
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'flip',
  enabled: true,
  phase: 'main',
  fn: flip,
  requiresIfExists: ['offset'],
  data: {
    _skip: false
  }
};
61 node_modules/@popperjs/core/dist/esm/modifiers/hide.js generated vendored Normal file
@@ -0,0 +1,61 @@
import { top, bottom, left, right } from "../enums.js";
import detectOverflow from "../utils/detectOverflow.js";

function getSideOffsets(overflow, rect, preventedOffsets) {
  if (preventedOffsets === void 0) {
    preventedOffsets = {
      x: 0,
      y: 0
    };
  }

  return {
    top: overflow.top - rect.height - preventedOffsets.y,
    right: overflow.right - rect.width + preventedOffsets.x,
    bottom: overflow.bottom - rect.height + preventedOffsets.y,
    left: overflow.left - rect.width - preventedOffsets.x
  };
}

function isAnySideFullyClipped(overflow) {
  return [top, right, bottom, left].some(function (side) {
    return overflow[side] >= 0;
  });
}

function hide(_ref) {
  var state = _ref.state,
      name = _ref.name;
  var referenceRect = state.rects.reference;
  var popperRect = state.rects.popper;
  var preventedOffsets = state.modifiersData.preventOverflow;
  var referenceOverflow = detectOverflow(state, {
    elementContext: 'reference'
  });
  var popperAltOverflow = detectOverflow(state, {
    altBoundary: true
  });
  var referenceClippingOffsets = getSideOffsets(referenceOverflow, referenceRect);
  var popperEscapeOffsets = getSideOffsets(popperAltOverflow, popperRect, preventedOffsets);
  var isReferenceHidden = isAnySideFullyClipped(referenceClippingOffsets);
  var hasPopperEscaped = isAnySideFullyClipped(popperEscapeOffsets);
  state.modifiersData[name] = {
    referenceClippingOffsets: referenceClippingOffsets,
    popperEscapeOffsets: popperEscapeOffsets,
    isReferenceHidden: isReferenceHidden,
    hasPopperEscaped: hasPopperEscaped
  };
  state.attributes.popper = Object.assign({}, state.attributes.popper, {
    'data-popper-reference-hidden': isReferenceHidden,
    'data-popper-escaped': hasPopperEscaped
  });
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'hide',
  enabled: true,
  phase: 'main',
  requiresIfExists: ['preventOverflow'],
  fn: hide
};
9 node_modules/@popperjs/core/dist/esm/modifiers/index.js generated vendored Normal file
@@ -0,0 +1,9 @@
export { default as applyStyles } from "./applyStyles.js";
export { default as arrow } from "./arrow.js";
export { default as computeStyles } from "./computeStyles.js";
export { default as eventListeners } from "./eventListeners.js";
export { default as flip } from "./flip.js";
export { default as hide } from "./hide.js";
export { default as offset } from "./offset.js";
export { default as popperOffsets } from "./popperOffsets.js";
export { default as preventOverflow } from "./preventOverflow.js";
54 node_modules/@popperjs/core/dist/esm/modifiers/offset.js generated vendored Normal file
@@ -0,0 +1,54 @@
import getBasePlacement from "../utils/getBasePlacement.js";
import { top, left, right, placements } from "../enums.js"; // eslint-disable-next-line import/no-unused-modules

export function distanceAndSkiddingToXY(placement, rects, offset) {
  var basePlacement = getBasePlacement(placement);
  var invertDistance = [left, top].indexOf(basePlacement) >= 0 ? -1 : 1;

  var _ref = typeof offset === 'function' ? offset(Object.assign({}, rects, {
    placement: placement
  })) : offset,
      skidding = _ref[0],
      distance = _ref[1];

  skidding = skidding || 0;
  distance = (distance || 0) * invertDistance;
  return [left, right].indexOf(basePlacement) >= 0 ? {
    x: distance,
    y: skidding
  } : {
    x: skidding,
    y: distance
  };
}

function offset(_ref2) {
  var state = _ref2.state,
      options = _ref2.options,
      name = _ref2.name;
  var _options$offset = options.offset,
      offset = _options$offset === void 0 ? [0, 0] : _options$offset;
  var data = placements.reduce(function (acc, placement) {
    acc[placement] = distanceAndSkiddingToXY(placement, state.rects, offset);
    return acc;
  }, {});
  var _data$state$placement = data[state.placement],
      x = _data$state$placement.x,
      y = _data$state$placement.y;

  if (state.modifiersData.popperOffsets != null) {
    state.modifiersData.popperOffsets.x += x;
    state.modifiersData.popperOffsets.y += y;
  }

  state.modifiersData[name] = data;
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'offset',
  enabled: true,
  phase: 'main',
  requires: ['popperOffsets'],
  fn: offset
};
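Editor's note, not part of this merge request: a small hedged example of how the [skidding, distance] pair handled by distanceAndSkiddingToXY above maps to x/y offsets per placement. The rect values are made up for illustration, and the deep import path assumes a bundler that resolves the dist ESM files.

// Illustration only: for 'bottom' the skidding moves along x and the distance along y;
// for 'top' and 'left' the distance is inverted so a positive value always pushes the
// popper away from its reference.
import { distanceAndSkiddingToXY } from "@popperjs/core/dist/esm/modifiers/offset.js";

var rects = {
  reference: { width: 100, height: 20, x: 0, y: 0 }, // made-up rects for the example
  popper: { width: 120, height: 40, x: 0, y: 0 }
};

console.log(distanceAndSkiddingToXY("bottom", rects, [5, 10])); // { x: 5, y: 10 }
console.log(distanceAndSkiddingToXY("top", rects, [5, 10]));    // { x: 5, y: -10 }
console.log(distanceAndSkiddingToXY("left", rects, [5, 10]));   // { x: -10, y: 5 }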
25 node_modules/@popperjs/core/dist/esm/modifiers/popperOffsets.js generated vendored Normal file
@@ -0,0 +1,25 @@
import computeOffsets from "../utils/computeOffsets.js";

function popperOffsets(_ref) {
  var state = _ref.state,
      name = _ref.name;
  // Offsets are the actual position the popper needs to have to be
  // properly positioned near its reference element
  // This is the most basic placement, and will be adjusted by
  // the modifiers in the next step
  state.modifiersData[name] = computeOffsets({
    reference: state.rects.reference,
    element: state.rects.popper,
    strategy: 'absolute',
    placement: state.placement
  });
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'popperOffsets',
  enabled: true,
  phase: 'read',
  fn: popperOffsets,
  data: {}
};
142 node_modules/@popperjs/core/dist/esm/modifiers/preventOverflow.js generated vendored Normal file
@@ -0,0 +1,142 @@
import { top, left, right, bottom, start } from "../enums.js";
import getBasePlacement from "../utils/getBasePlacement.js";
import getMainAxisFromPlacement from "../utils/getMainAxisFromPlacement.js";
import getAltAxis from "../utils/getAltAxis.js";
import { within, withinMaxClamp } from "../utils/within.js";
import getLayoutRect from "../dom-utils/getLayoutRect.js";
import getOffsetParent from "../dom-utils/getOffsetParent.js";
import detectOverflow from "../utils/detectOverflow.js";
import getVariation from "../utils/getVariation.js";
import getFreshSideObject from "../utils/getFreshSideObject.js";
import { min as mathMin, max as mathMax } from "../utils/math.js";

function preventOverflow(_ref) {
  var state = _ref.state,
      options = _ref.options,
      name = _ref.name;
  var _options$mainAxis = options.mainAxis,
      checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,
      _options$altAxis = options.altAxis,
      checkAltAxis = _options$altAxis === void 0 ? false : _options$altAxis,
      boundary = options.boundary,
      rootBoundary = options.rootBoundary,
      altBoundary = options.altBoundary,
      padding = options.padding,
      _options$tether = options.tether,
      tether = _options$tether === void 0 ? true : _options$tether,
      _options$tetherOffset = options.tetherOffset,
      tetherOffset = _options$tetherOffset === void 0 ? 0 : _options$tetherOffset;
  var overflow = detectOverflow(state, {
    boundary: boundary,
    rootBoundary: rootBoundary,
    padding: padding,
    altBoundary: altBoundary
  });
  var basePlacement = getBasePlacement(state.placement);
  var variation = getVariation(state.placement);
  var isBasePlacement = !variation;
  var mainAxis = getMainAxisFromPlacement(basePlacement);
  var altAxis = getAltAxis(mainAxis);
  var popperOffsets = state.modifiersData.popperOffsets;
  var referenceRect = state.rects.reference;
  var popperRect = state.rects.popper;
  var tetherOffsetValue = typeof tetherOffset === 'function' ? tetherOffset(Object.assign({}, state.rects, {
    placement: state.placement
  })) : tetherOffset;
  var normalizedTetherOffsetValue = typeof tetherOffsetValue === 'number' ? {
    mainAxis: tetherOffsetValue,
    altAxis: tetherOffsetValue
  } : Object.assign({
    mainAxis: 0,
    altAxis: 0
  }, tetherOffsetValue);
  var offsetModifierState = state.modifiersData.offset ? state.modifiersData.offset[state.placement] : null;
  var data = {
    x: 0,
    y: 0
  };

  if (!popperOffsets) {
    return;
  }

  if (checkMainAxis) {
    var _offsetModifierState$;

    var mainSide = mainAxis === 'y' ? top : left;
    var altSide = mainAxis === 'y' ? bottom : right;
    var len = mainAxis === 'y' ? 'height' : 'width';
    var offset = popperOffsets[mainAxis];
    var min = offset + overflow[mainSide];
    var max = offset - overflow[altSide];
    var additive = tether ? -popperRect[len] / 2 : 0;
    var minLen = variation === start ? referenceRect[len] : popperRect[len];
    var maxLen = variation === start ? -popperRect[len] : -referenceRect[len]; // We need to include the arrow in the calculation so the arrow doesn't go
    // outside the reference bounds

    var arrowElement = state.elements.arrow;
    var arrowRect = tether && arrowElement ? getLayoutRect(arrowElement) : {
      width: 0,
      height: 0
    };
    var arrowPaddingObject = state.modifiersData['arrow#persistent'] ? state.modifiersData['arrow#persistent'].padding : getFreshSideObject();
    var arrowPaddingMin = arrowPaddingObject[mainSide];
    var arrowPaddingMax = arrowPaddingObject[altSide]; // If the reference length is smaller than the arrow length, we don't want
    // to include its full size in the calculation. If the reference is small
    // and near the edge of a boundary, the popper can overflow even if the
    // reference is not overflowing as well (e.g. virtual elements with no
    // width or height)

    var arrowLen = within(0, referenceRect[len], arrowRect[len]);
    var minOffset = isBasePlacement ? referenceRect[len] / 2 - additive - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis : minLen - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis;
    var maxOffset = isBasePlacement ? -referenceRect[len] / 2 + additive + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis : maxLen + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis;
    var arrowOffsetParent = state.elements.arrow && getOffsetParent(state.elements.arrow);
    var clientOffset = arrowOffsetParent ? mainAxis === 'y' ? arrowOffsetParent.clientTop || 0 : arrowOffsetParent.clientLeft || 0 : 0;
    var offsetModifierValue = (_offsetModifierState$ = offsetModifierState == null ? void 0 : offsetModifierState[mainAxis]) != null ? _offsetModifierState$ : 0;
    var tetherMin = offset + minOffset - offsetModifierValue - clientOffset;
    var tetherMax = offset + maxOffset - offsetModifierValue;
    var preventedOffset = within(tether ? mathMin(min, tetherMin) : min, offset, tether ? mathMax(max, tetherMax) : max);
    popperOffsets[mainAxis] = preventedOffset;
    data[mainAxis] = preventedOffset - offset;
  }

  if (checkAltAxis) {
    var _offsetModifierState$2;

    var _mainSide = mainAxis === 'x' ? top : left;

    var _altSide = mainAxis === 'x' ? bottom : right;

    var _offset = popperOffsets[altAxis];

    var _len = altAxis === 'y' ? 'height' : 'width';

    var _min = _offset + overflow[_mainSide];

    var _max = _offset - overflow[_altSide];

    var isOriginSide = [top, left].indexOf(basePlacement) !== -1;

    var _offsetModifierValue = (_offsetModifierState$2 = offsetModifierState == null ? void 0 : offsetModifierState[altAxis]) != null ? _offsetModifierState$2 : 0;

    var _tetherMin = isOriginSide ? _min : _offset - referenceRect[_len] - popperRect[_len] - _offsetModifierValue + normalizedTetherOffsetValue.altAxis;

    var _tetherMax = isOriginSide ? _offset + referenceRect[_len] + popperRect[_len] - _offsetModifierValue - normalizedTetherOffsetValue.altAxis : _max;

    var _preventedOffset = tether && isOriginSide ? withinMaxClamp(_tetherMin, _offset, _tetherMax) : within(tether ? _tetherMin : _min, _offset, tether ? _tetherMax : _max);

    popperOffsets[altAxis] = _preventedOffset;
    data[altAxis] = _preventedOffset - _offset;
  }

  state.modifiersData[name] = data;
} // eslint-disable-next-line import/no-unused-modules


export default {
  name: 'preventOverflow',
  enabled: true,
  phase: 'main',
  fn: preventOverflow,
  requiresIfExists: ['offset']
};
3 node_modules/@popperjs/core/dist/esm/popper-base.js generated vendored Normal file
@@ -0,0 +1,3 @@
import { createPopper, popperGenerator, detectOverflow } from "./createPopper.js";
// eslint-disable-next-line import/no-unused-modules
export { createPopper, popperGenerator, detectOverflow };
11 node_modules/@popperjs/core/dist/esm/popper-lite.js generated vendored Normal file
@@ -0,0 +1,11 @@
import { popperGenerator, detectOverflow } from "./createPopper.js";
import eventListeners from "./modifiers/eventListeners.js";
import popperOffsets from "./modifiers/popperOffsets.js";
import computeStyles from "./modifiers/computeStyles.js";
import applyStyles from "./modifiers/applyStyles.js";
var defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles];
var createPopper = /*#__PURE__*/popperGenerator({
  defaultModifiers: defaultModifiers
}); // eslint-disable-next-line import/no-unused-modules

export { createPopper, popperGenerator, defaultModifiers, detectOverflow };
20 node_modules/@popperjs/core/dist/esm/popper.js generated vendored Normal file
@@ -0,0 +1,20 @@
import { popperGenerator, detectOverflow } from "./createPopper.js";
import eventListeners from "./modifiers/eventListeners.js";
import popperOffsets from "./modifiers/popperOffsets.js";
import computeStyles from "./modifiers/computeStyles.js";
import applyStyles from "./modifiers/applyStyles.js";
import offset from "./modifiers/offset.js";
import flip from "./modifiers/flip.js";
import preventOverflow from "./modifiers/preventOverflow.js";
import arrow from "./modifiers/arrow.js";
import hide from "./modifiers/hide.js";
var defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip, preventOverflow, arrow, hide];
var createPopper = /*#__PURE__*/popperGenerator({
  defaultModifiers: defaultModifiers
}); // eslint-disable-next-line import/no-unused-modules

export { createPopper, popperGenerator, defaultModifiers, detectOverflow }; // eslint-disable-next-line import/no-unused-modules

export { createPopper as createPopperLite } from "./popper-lite.js"; // eslint-disable-next-line import/no-unused-modules

export * from "./modifiers/index.js";
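Editor's note, not part of this merge request: a minimal hedged usage sketch of the createPopper entry point assembled above, following the public Popper 2 API. The element selectors and option values are illustrative only.

// Illustration only: position a tooltip relative to a button with the full build
// (eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip,
// preventOverflow, arrow, hide). Selectors and option values are hypothetical.
import { createPopper } from "@popperjs/core";

var button = document.querySelector("#show-tooltip");
var tooltip = document.querySelector("#tooltip");

var instance = createPopper(button, tooltip, {
  placement: "bottom-start",
  modifiers: [
    { name: "offset", options: { offset: [0, 8] } },      // 8px gap below the button
    { name: "preventOverflow", options: { padding: 8 } }  // keep 8px from the boundary edge
  ]
});

// instance.update() recomputes the position after layout changes;
// instance.destroy() removes the listeners added by the eventListeners modifier.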
0 node_modules/@popperjs/core/dist/esm/types.js generated vendored Normal file
Some files were not shown because too many files have changed in this diff.