@a_kawashiro/jendeley - npm package version comparison

Comparing version 0.0.18 to 0.0.19

dist/load_db.js (86 lines changed; diff not shown)

dist/api.js

@@ -49,16 +49,33 @@ "use strict";
 const gen_1 = require("./gen");
 const validate_db_1 = require("./validate_db");
 const E = __importStar(require("fp-ts/lib/Either"));
+const load_db_1 = require("./load_db");
 function checkEntry(entry) {
-    if (entry.title == undefined || entry.path == undefined) {
-        logger_1.logger.fatal("id = " + entry.id + "entry = " + JSON.stringify(entry, undefined, 2));
-        process.exit(1);
-    }
+    if (entry.idType == "url") {
+        if (entry.title == undefined || entry.path != undefined) {
+            logger_1.logger.fatal("Check failed in checkEntry: id = " +
+                entry.id +
+                " entry = " +
+                JSON.stringify(entry, undefined, 2));
+            process.exit(1);
+        }
+    }
+    else {
+        if (entry.title == undefined || entry.path == undefined) {
+            logger_1.logger.fatal("Check failed in checkEntry: id = " +
+                entry.id +
+                " entry = " +
+                JSON.stringify(entry, undefined, 2));
+            process.exit(1);
+        }
+    }
 }
 function getEntry(id, jsonDB) {
     if (jsonDB[id] == undefined) {
-        throw Error("json[" + id + "] != undefined");
+        logger_1.logger.fatal("json[" + id + "] != undefined");
+        process.exit(1);
     }
     const entryInDB = jsonDB[id];
     if (entryInDB.idType == "meta") {
-        throw Error("metadata = " + JSON.stringify(entryInDB));
+        logger_1.logger.fatal("metadata = " + JSON.stringify(entryInDB));
+        process.exit(1);
     }
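In 0.0.18 every entry needed both a title and a path; 0.0.19 splits the check by idType, since "url" entries exist only in the DB and must not carry a file path. A minimal sketch of the new invariant restated as a pure predicate (isValidEntry is an illustrative name, not part of the package; checkEntry itself logs via logger.fatal and calls process.exit(1)):

```
// The invariant the new checkEntry enforces, restated as a predicate.
function isValidEntry(entry) {
  if (entry.idType == "url") {
    // URL entries live only in the DB: title required, no file path allowed.
    return entry.title != undefined && entry.path == undefined;
  }
  // Every other entry type points at a file: title and path both required.
  return entry.title != undefined && entry.path != undefined;
}

console.log(isValidEntry({ idType: "url", title: "t", path: "x.pdf" })); // false
console.log(isValidEntry({ idType: "doi", title: "t", path: "x.pdf" })); // true
```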

@@ -204,3 +221,3 @@ if (entryInDB.idType == "url") {
     if (entryInDB.idType != constants_1.ID_TYPE_PATH) {
-        throw new Error(jsonDB[id][constants_1.ENTRY_ID_TYPE] +
+        logger_1.logger.fatal(jsonDB[id][constants_1.ENTRY_ID_TYPE] +
             " must be " +

@@ -211,2 +228,3 @@ constants_1.ID_TYPE_PATH +
             " is not.");
+        process.exit(1);
     }

@@ -241,3 +259,3 @@ const pathEntry = entryInDB;
     const entry = entry_o;
-    let jsonDB = JSON.parse(fs_1.default.readFileSync(dbPath).toString());
+    let jsonDB = (0, load_db_1.loadDB)(dbPath, false);
     if (jsonDB[entry.id] != undefined) {

@@ -248,6 +266,3 @@ logger_1.logger.info("Update DB with entry = " + JSON.stringify(entry));
     }
-    if (!(0, validate_db_1.validateJsonDB)(jsonDB, dbPath)) {
-        throw new Error("validateJsonDB failed!");
-    }
-    fs_1.default.writeFileSync(dbPath, JSON.stringify(jsonDB));
+    (0, load_db_1.saveDB)(jsonDB, dbPath);
     const r = {
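This read-validate-write boilerplate is replaced by the new load_db module throughout api.js and gen.js. The module's own diff is not rendered on this page, so the following is only a plausible minimal shape inferred from the call sites, loadDB(dbPath, flag) and saveDB(jsonDB, dbPath); the meaning of loadDB's second argument is not visible here and is treated as a hypothetical skip-validation switch:

```
// Hypothetical sketch of dist/load_db.js, inferred from its call sites above.
// Not the package's actual implementation.
const fs = require("fs");
const { validateJsonDB } = require("./validate_db");

function loadDB(dbPath, skipValidation /* meaning assumed, not shown in this diff */) {
  const jsonDB = JSON.parse(fs.readFileSync(dbPath).toString());
  if (!skipValidation && !validateJsonDB(jsonDB, dbPath)) {
    throw new Error("validateJsonDB failed!");
  }
  return jsonDB;
}

function saveDB(jsonDB, dbPath) {
  // Validate once, in one place, before every write.
  if (!validateJsonDB(jsonDB, dbPath)) {
    throw new Error("validateJsonDB failed!");
  }
  fs.writeFileSync(dbPath, JSON.stringify(jsonDB));
}

exports.loadDB = loadDB;
exports.saveDB = saveDB;
```

Centralizing validation this way removes the five near-identical validate-then-write blocks that 0.0.18 scattered across the handlers below.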

@@ -271,7 +286,7 @@ isSucceeded: true,
 exports.updateEntry = updateEntry;
-function getPdf(request, response, db_path) {
+function getPdf(request, response, dbPath) {
     logger_1.logger.info("Get a get_pdf request", request.url);
     const params = url_1.default.parse(request.url, true).query;
-    const pdf_path = unescape(base_64_1.default.decode(params.file));
-    const pdf = fs_1.default.readFileSync(path_1.default.join(path_1.default.dirname(db_path), pdf_path));
+    const pdfPath = unescape(base_64_1.default.decode(params.file));
+    const pdf = fs_1.default.readFileSync(path_1.default.join(path_1.default.dirname(dbPath), pdfPath));
     response.writeHead(200, {
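getPdf expects the file query parameter to be the base64-encoded, escape()-d path of the PDF relative to the DB directory. A hedged client-side sketch (the route string and port are not visible in this diff; /api/get_pdf and localhost:5000 below are assumptions based on the log message):

```
// Hypothetical request construction mirroring the server's
// unescape(base64.decode(params.file)) above.
const base64 = require("base-64");

const relativePdfPath = "papers/some paper.pdf"; // illustrative path
const url = "http://localhost:5000/api/get_pdf?file=" +
  encodeURIComponent(base64.encode(escape(relativePdfPath)));
console.log(url);
```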

@@ -284,6 +299,6 @@ "Content-Type": "application/pdf",
 exports.getPdf = getPdf;
-function getDB(request, response, dbPathDB) {
+function getDB(request, response, dbPath) {
     logger_1.logger.info("Get a get_db request" + request.url);
-    const jsonDB = JSON.parse(fs_1.default.readFileSync(dbPathDB).toString());
-    let db_response = [];
+    const jsonDB = (0, load_db_1.loadDB)(dbPath, false);
+    let dbResponse = [];
     for (const id of Object.keys(jsonDB)) {

@@ -295,5 +310,5 @@ if (jsonDB[id] == undefined)
         const e = getEntry(id, jsonDB);
-        db_response.push(e);
+        dbResponse.push(e);
     }
-    response.status(200).json(db_response);
+    response.status(200).json(dbResponse);
     logger_1.logger.info("Sent a response from get_db");

@@ -320,3 +335,3 @@ }
     JSON.stringify(req));
-    const jsonDB = JSON.parse(fs_1.default.readFileSync(dbPath).toString());
+    const jsonDB = (0, load_db_1.loadDB)(dbPath, false);
     const title = req.title == "" ? yield getTitleFromUrl(req.url) : req.title;

@@ -329,6 +344,3 @@ const date = new Date();
     if (E.isRight(newDBOrError)) {
-        if (!(0, validate_db_1.validateJsonDB)(E.toUnion(newDBOrError), dbPath)) {
-            throw new Error("validateJsonDB failed!");
-        }
-        fs_1.default.writeFileSync(dbPath, JSON.stringify(E.toUnion(newDBOrError)));
+        (0, load_db_1.saveDB)(E.toUnion(newDBOrError), dbPath);
         const r = {

@@ -389,3 +401,3 @@ isSucceeded: true,
     yield download(req.url, path_1.default.join(path_1.default.dirname(dbPath), filename));
-    const jsonDB = JSON.parse(fs_1.default.readFileSync(dbPath).toString());
+    const jsonDB = (0, load_db_1.loadDB)(dbPath, false);
     const date = new Date();

@@ -395,8 +407,7 @@ const date_tag = date.toISOString().split("T")[0];
     tags.push(date_tag);
-    const newDBOrError = yield (0, gen_1.registerNonBookPDF)(path_1.default.dirname(dbPath), filename, jsonDB, req.title, req.comments, tags, true, req.url);
-    if (E.isRight(newDBOrError)) {
-        if (!(0, validate_db_1.validateJsonDB)(E.toUnion(newDBOrError), dbPath)) {
-            throw new Error("validateJsonDB failed!");
-        }
-        fs_1.default.writeFileSync(dbPath, JSON.stringify(E.toUnion(newDBOrError)));
+    const idEntryOrError = yield (0, gen_1.registerNonBookPDF)(path_1.default.dirname(dbPath), filename, jsonDB, req.title, req.comments, tags, true, req.url);
+    if (E.isRight(idEntryOrError)) {
+        const t = E.toUnion(idEntryOrError);
+        jsonDB[t[0]] = t[1];
+        (0, load_db_1.saveDB)(jsonDB, dbPath);
         const r = {

@@ -409,3 +420,3 @@ isSucceeded: true,
     else {
-        const err = E.toUnion(newDBOrError);
+        const err = E.toUnion(idEntryOrError);
         const r = {

@@ -421,3 +432,3 @@ isSucceeded: false,
 exports.addPdfFromUrl = addPdfFromUrl;
-function deleteEntry(request, response, db_path) {
+function deleteEntry(request, response, dbPath) {
     logger_1.logger.info("Get a delete_entry request url = " + request.url);

@@ -427,7 +438,7 @@ const entry_o = request.body;
     const entry = entry_o;
-    let jsonDB = JSON.parse(fs_1.default.readFileSync(db_path).toString());
+    let jsonDB = (0, load_db_1.loadDB)(dbPath, false);
     if (jsonDB[entry.id] != undefined &&
         jsonDB[entry.id][constants_1.ENTRY_PATH] != undefined) {
         logger_1.logger.info("Delete " + jsonDB[entry.id]["path"]);
-        const old_filename = path_1.default.join(path_1.default.dirname(db_path), jsonDB[entry.id]["path"]);
+        const old_filename = path_1.default.join(path_1.default.dirname(dbPath), jsonDB[entry.id]["path"]);
         const dir = path_1.default.dirname(old_filename);

@@ -450,6 +461,3 @@ const new_filename = path_1.default.join(dir, path_1.default.basename(old_filename, ".pdf") + " " + constants_1.JENDELEY_NO_TRACK + ".pdf");
     }
-    if (!(0, validate_db_1.validateJsonDB)(jsonDB, db_path)) {
-        throw new Error("validateJsonDB failed!");
-    }
-    fs_1.default.writeFileSync(db_path, JSON.stringify(jsonDB));
+    (0, load_db_1.saveDB)(jsonDB, dbPath);
     const r = {

@@ -456,0 +464,0 @@ isSucceeded: true,

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JENDELEY_VERSION = exports.ARXIV_API_URL = exports.ID_TYPE_URL = exports.ID_TYPE_META = exports.ID_TYPE_BOOK = exports.ID_TYPE_PATH = exports.ID_TYPE_ISBN = exports.ID_TYPE_DOI = exports.ID_TYPE_ARXIV = exports.ENTRY_DATA_FROM_ARXIV = exports.ENTRY_TITLE = exports.ENTRY_COMMENTS = exports.ENTRY_TAGS = exports.ENTRY_URL = exports.ENTRY_PATH = exports.ENTRY_ID_TYPE = exports.DB_META_KEY = exports.JENDELEY_NO_ID = exports.JENDELEY_NO_TRACK = void 0;
exports.JENDELEY_DIR = exports.JENDELEY_VERSION = exports.ARXIV_API_URL = exports.ID_TYPE_URL = exports.ID_TYPE_META = exports.ID_TYPE_BOOK = exports.ID_TYPE_PATH = exports.ID_TYPE_ISBN = exports.ID_TYPE_DOI = exports.ID_TYPE_ARXIV = exports.ENTRY_DATA_FROM_ARXIV = exports.ENTRY_TITLE = exports.ENTRY_COMMENTS = exports.ENTRY_TAGS = exports.ENTRY_URL = exports.ENTRY_PATH = exports.ENTRY_ID_TYPE = exports.DB_META_KEY = exports.JENDELEY_NO_ID = exports.JENDELEY_NO_TRACK = void 0;
const JENDELEY_NO_TRACK = "[jendeley no track]";

@@ -40,4 +40,6 @@ exports.JENDELEY_NO_TRACK = JENDELEY_NO_TRACK;

exports.ENTRY_DATA_FROM_ARXIV = ENTRY_DATA_FROM_ARXIV;
const JENDELEY_VERSION = "0.0.18";
const JENDELEY_VERSION = "0.0.19";
exports.JENDELEY_VERSION = JENDELEY_VERSION;
const JENDELEY_DIR = ".jendeley";
exports.JENDELEY_DIR = JENDELEY_DIR;
//# sourceMappingURL=constants.js.map

dist/docid.js

@@ -227,2 +227,9 @@ "use strict";
     }
+    const regexpArxiv = new RegExp("\\[\\s*jendeley\\s+arxiv\\s+([0-9]{4}_[0-9v]+)\\s*\\]", "g");
+    const foundArxiv = [...pdf.matchAll(regexpArxiv)];
+    for (const f of foundArxiv) {
+        let d = f[1];
+        d = d.substring(0, 4) + "." + d.substring(5);
+        return E.right({ docIDType: "arxiv", arxiv: d });
+    }
     const regexpISBN = new RegExp(".*\\[\\s*jendeley\\s+isbn\\s+([0-9]{10,})\\s*\\]", "g");

@@ -298,3 +305,4 @@ const foundISBN = [...pdf.matchAll(regexpISBN)];
     path_1.default.extname(titleFromPdf) != ".dvi" &&
-    path_1.default.extname(titleFromPdf) != ".pdf") {
+    path_1.default.extname(titleFromPdf) != ".pdf" &&
+    path_1.default.extname(titleFromPdf) != ".tex") {
     titles.push(titleFromPdf);

@@ -301,0 +309,0 @@ }
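The added block lets getDocID recover an arXiv ID written into the filename, with an underscore standing in for the dot in tags like `[jendeley arxiv 1811_03479v3]`. A quick standalone check of the conversion (plain Node, no package code), matching the new test case further down this page:

```
// Standalone check of the filename-to-arXiv-ID conversion added above.
const pdf = "A Program Logic for First-Order Encapsulated WebAssembly [jendeley arxiv 1811_03479v3].pdf";
const regexpArxiv = new RegExp("\\[\\s*jendeley\\s+arxiv\\s+([0-9]{4}_[0-9v]+)\\s*\\]", "g");
for (const f of pdf.matchAll(regexpArxiv)) {
  let d = f[1];                                 // "1811_03479v3"
  d = d.substring(0, 4) + "." + d.substring(5); // "1811.03479v3"
  console.log(d);
}
```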

dist/gen.js

@@ -49,2 +49,3 @@ "use strict";
 const E = __importStar(require("fp-ts/lib/Either"));
+const load_db_1 = require("./load_db");
 function walkPDFDFS(dir) {

@@ -278,6 +279,3 @@ if (!fs_1.default.existsSync(dir)) {
     }
-    if (!(0, validate_db_1.validateJsonDB)(jsonDB, undefined)) {
-        throw new Error("validateJsonDB failed!");
-    }
-    fs_1.default.writeFileSync(output, JSON.stringify(jsonDB, null, 2));
+    (0, load_db_1.saveDB)(jsonDB, output);
 }

@@ -307,5 +305,2 @@ exports.genDummyDB = genDummyDB;
     logger_1.logger.info("Register url_" + url);
-    if (!(0, validate_db_1.validateJsonDB)(jsonDB, undefined)) {
-        throw new Error("validateJsonDB failed!");
-    }
     return E.right(jsonDB);

@@ -328,2 +323,6 @@ }
         comments);
+    if (!(0, validate_db_1.validateJsonDB)(jsonDB, undefined)) {
+        logger_1.logger.fatal("validateJsonDB failed in registerNonBookPDF");
+        process.exit(1);
+    }
     const docID = yield (0, docid_1.getDocID)(pdf, papersDir, false, downloadUrl);

@@ -337,4 +336,3 @@ if (E.isLeft(docID)) {
     if (t == undefined) {
-        logger_1.logger.warn(pdf + " is not valid.");
-        return E.right(jsonDB);
+        return E.left(pdf + " is not valid.");
     }

@@ -349,8 +347,8 @@ const json = t[0];
     if (jsonDB.hasOwnProperty(dbID)) {
-        logger_1.logger.warn(pdf +
-            " is duplicated. You can find another file in " +
-            jsonDB[dbID][constants_1.ENTRY_PATH] +
-            ".");
-        console.warn("mv ", '"' + pdf + '" duplicated');
-        return E.right(jsonDB);
+        // TODO: Make shell script to delete duplicated files.
+        console.warn("mv ", '"' + pdf + '" duplicated');
+        return E.left(pdf +
+            " is duplicated. You can find another file in " +
+            jsonDB[dbID][constants_1.ENTRY_PATH] +
+            ".");
     }

@@ -365,4 +363,3 @@ // TODO: Condition of json[ENTRY_ID_TYPE] != "path" is not good
     if (fs_1.default.existsSync(path_1.default.join(papersDir, newFilename))) {
-        logger_1.logger.warn(newFilename + " already exists. Skip registration.");
-        return E.right(jsonDB);
+        return E.left(newFilename + " already exists. Skip registration.");
     }

@@ -372,11 +369,7 @@ fs_1.default.renameSync(path_1.default.join(papersDir, oldFileneme), path_1.default.join(papersDir, newFilename));
     }
-    jsonDB[dbID] = json;
-    if (!(0, validate_db_1.validateJsonDB)(jsonDB, undefined)) {
-        throw new Error("validateJsonDB failed!\n" + JSON.stringify(jsonDB, null, 2));
-    }
-    return E.right(jsonDB);
+    return E.right([dbID, json]);
     });
 }
 exports.registerNonBookPDF = registerNonBookPDF;
-function genDB(papersDir, bookDirsStr, dbName) {
+function genDB(papersDir, bookDirsStr, dbName, deleteUnreachableFiles) {
     return __awaiter(this, void 0, void 0, function* () {
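registerNonBookPDF now returns an Either of an error string or an [id, entry] pair instead of the whole database: failures (invalid PDF, duplicate, filename collision) become E.left values the caller can report, and success carries only the new pair, so the caller performs its own DB mutation. A minimal sketch of consuming such a value (illustrative id and entry; fp-ts is a real dependency of this package, see the package.json hunk below):

```
// Minimal consumer of an Either<string, [id, entry]> in the style used above.
const E = require("fp-ts/lib/Either");

const idEntryOrError = E.right(["doi_10.1145/xxxx", { idType: "doi", title: "t", path: "/t.pdf" }]);
const jsonDB = {};
if (E.isRight(idEntryOrError)) {
  const t = E.toUnion(idEntryOrError); // the [id, entry] pair
  jsonDB[t[0]] = t[1];                 // the caller owns the DB mutation
} else {
  console.warn(E.toUnion(idEntryOrError)); // the error string from E.left
}
```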

@@ -392,11 +385,35 @@ papersDir = path_1.default.resolve(papersDir);
     if (!fs_1.default.existsSync(papersDir)) {
-        logger_1.logger.warn("papersDir:", papersDir + " is not exist.");
-        return;
+        logger_1.logger.fatal("papersDir:", papersDir + " is not exist.");
+        process.exit(1);
     }
     for (const bd of bookDirs) {
         if (!fs_1.default.existsSync(path_1.default.join(papersDir, bd))) {
-            logger_1.logger.warn("bd:", path_1.default.join(papersDir, bd) + " is not exist.");
-            return;
+            logger_1.logger.fatal("book directory:" + path_1.default.join(papersDir, bd) + " is not exist.");
+            process.exit(1);
         }
     }
+    if (deleteUnreachableFiles) {
+        if (!fs_1.default.existsSync(path_1.default.join(papersDir, dbName))) {
+            logger_1.logger.fatal("You use --delete_unreachable_files but " +
+                path_1.default.join(papersDir, dbName) +
+                " does not exist.");
+            process.exit(1);
+        }
+        let jsonDB = (0, load_db_1.loadDB)(path_1.default.join(papersDir, dbName), true);
+        let deletedIDs = [];
+        for (const id of Object.keys(jsonDB)) {
+            const e = jsonDB[id];
+            if (e.idType != "url" && e.idType != "meta") {
+                const p = e.path;
+                if (!fs_1.default.existsSync(path_1.default.join(papersDir, p))) {
+                    logger_1.logger.warn(p + " does not exist. Delete entry " + id);
+                    deletedIDs.push(id);
+                }
+            }
+        }
+        for (const id of deletedIDs) {
+            delete jsonDB[id];
+        }
+        (0, load_db_1.saveDB)(jsonDB, path_1.default.join(papersDir, dbName));
+    }
     let bookChapters = {};
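Only file-backed entries are candidates for pruning: "url" and "meta" entries carry no path (per the checkEntry invariant above), so they are always kept. A compact restatement of the rule behind --delete_unreachable_files (isUnreachable is an illustrative name, not package API):

```
// Restatement of the pruning rule used by --delete_unreachable_files.
const fs = require("fs");
const path = require("path");

function isUnreachable(papersDir, entry) {
  // "url" and "meta" entries have no file on disk, so never prune them.
  if (entry.idType == "url" || entry.idType == "meta") return false;
  return !fs.existsSync(path.join(papersDir, entry.path));
}
```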

@@ -407,3 +424,3 @@ let jsonDB = {};
     if (fs_1.default.existsSync(path_1.default.join(papersDir, dbName))) {
-        jsonDB = JSON.parse(fs_1.default.readFileSync(path_1.default.join(papersDir, dbName)).toString());
+        jsonDB = (0, load_db_1.loadDB)(path_1.default.join(papersDir, dbName), false);
         for (const id of Object.keys(jsonDB)) {

@@ -442,5 +459,6 @@ exstingPdfs.push(jsonDB[id][constants_1.ENTRY_PATH]);
     if (!isBook) {
-        const newDB = yield registerNonBookPDF(papersDir, p, jsonDB, undefined, "", [], false, undefined);
-        if (E.isRight(newDB)) {
-            jsonDB = E.toUnion(newDB);
+        const idEntryOrError = yield registerNonBookPDF(papersDir, p, jsonDB, undefined, "", [], false, undefined);
+        if (E.isRight(idEntryOrError)) {
+            const t = E.toUnion(idEntryOrError);
+            jsonDB[t[0]] = t[1];
         }

@@ -452,3 +470,5 @@ }
     if (bookInfo == undefined) {
-        logger_1.logger.warn("PDFs in " + bookDir + " are ignored. Because we cannot find no ISBN.");
+        if (bookChapters[bookDir].pdfs.length > 0) {
+            logger_1.logger.warn("PDFs in " + bookDir + " are ignored. Because we cannot find no ISBN.");
+        }
         continue;

@@ -525,12 +545,4 @@ }
     }
-    try {
-        const dbPath = path_1.default.join(papersDir, dbName);
-        if (!(0, validate_db_1.validateJsonDB)(jsonDB, dbPath)) {
-            throw new Error("validateJsonDB failed!");
-        }
-        fs_1.default.writeFileSync(dbPath, JSON.stringify(jsonDB, null, 2));
-    }
-    catch (err) {
-        logger_1.logger.warn(err);
-    }
+    const dbPath = path_1.default.join(papersDir, dbName);
+    (0, load_db_1.saveDB)(jsonDB, dbPath);
     });

@@ -537,0 +549,0 @@ }

dist/docid.test.js

@@ -94,2 +94,7 @@ "use strict";
 }));
+test("arXiv from path", () => __awaiter(void 0, void 0, void 0, function* () {
+    const pdf = "A Program Logic for First-Order Encapsulated WebAssembly [jendeley arxiv 1811_03479v3].pdf";
+    const docID = yield (0, docid_1.getDocID)(pdf, "/hoge/", false, undefined);
+    expect(docID).toStrictEqual(rightArxiv("1811.03479v3"));
+}));
 test("ISBN from path", () => __awaiter(void 0, void 0, void 0, function* () {

@@ -96,0 +101,0 @@ const pdf = "hoge [jendeley isbn 9781467330763].pdf";

dist/index.js

@@ -18,2 +18,3 @@ #!/usr/bin/env node
 const constants_1 = require("./constants");
+const logger_1 = require("./logger");
 function main() {

@@ -29,2 +30,3 @@ return __awaiter(this, void 0, void 0, function* () {
     .option("--db_name <db_name>", "Name of DB. DB is created under <papers_dir>. By default, <papers_dir>/db.json.")
+    .option("--delete_unreachable_files", "Delete entries corresponding to unreachable files.")
     .action((cmd, options) => {

@@ -37,3 +39,7 @@ const book_dirs_str = options._optionValues.book_dirs == undefined
         : options._optionValues.db_name;
-    (0, gen_1.genDB)(options._optionValues.papers_dir, book_dirs_str, db_name);
+    if (db_name == constants_1.JENDELEY_DIR) {
+        logger_1.logger.fatal(constants_1.JENDELEY_DIR + " cannot used as the name of DB.");
+        process.exit(1);
+    }
+    (0, gen_1.genDB)(options._optionValues.papers_dir, book_dirs_str, db_name, options._optionValues.delete_unreachable_files);
     });

@@ -40,0 +46,0 @@ program
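With this wiring, pruning is opt-in from the command line and flows straight through to genDB. A hypothetical invocation (the subcommand name is not visible in this diff and is assumed here):

```
> jendeley scan --papers_dir <your_papers_dir> --delete_unreachable_files
```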

dist/validate_db.js

@@ -67,3 +67,3 @@ "use strict";
     const filepath = jsonDB[id][constants_1.ENTRY_PATH];
-    if (!path_1.default.join(dbDir, filepath)) {
+    if (!fs_1.default.existsSync(path_1.default.join(dbDir, filepath))) {
         logger_1.logger.warn("File not exists: " + filepath + " id: " + id);

@@ -103,2 +103,34 @@ validDB = false;
     }
+    // Check jendeley got valid data from API.
+    const entry = jsonDB[id];
+    if (entry.idType == constants_1.ID_TYPE_DOI) {
+        if (entry.dataFromCrossref["indexed"] == undefined) {
+            validDB = false;
+            logger_1.logger.warn("Entry of id = " +
+                id +
+                " path = " +
+                entry.path +
+                " looks failed to get data from crossref. Please consider change filename to we can find manually written DocID.");
+        }
+    }
+    else if (entry.idType == constants_1.ID_TYPE_ARXIV) {
+        if (entry.dataFromArxiv["id"] == undefined) {
+            validDB = false;
+            logger_1.logger.warn("Entry of id = " +
+                id +
+                " path = " +
+                entry.path +
+                " looks failed to get data from arxiv. Please consider change filename to we can find manually written DocID.");
+        }
+    }
+    else if (entry.idType == constants_1.ID_TYPE_BOOK || entry.idType == constants_1.ID_TYPE_ISBN) {
+        if (entry.dataFromNodeIsbn["title"] == undefined) {
+            validDB = false;
+            logger_1.logger.warn("Entry of id = " +
+                id +
+                " path = " +
+                entry.path +
+                " looks failed to get data from isbn. Please consider change filename to we can find manually written DocID.");
+        }
+    }
     }

@@ -109,5 +141,5 @@ return validDB;
 function validateDB(dbPath) {
-    logger_1.logger.info("validateDB start");
+    logger_1.logger.info("validateDB");
     dbPath = path_1.default.resolve(dbPath);
-    const jsonDB = JSON.parse(fs_1.default.readFileSync(dbPath).toString());
+    if (!fs_1.default.existsSync(dbPath)) {

@@ -117,5 +149,12 @@ logger_1.logger.warn(dbPath + "does not exists.");
     }
-    return validateJsonDB(jsonDB, dbPath);
+    const r = validateJsonDB(jsonDB, dbPath);
+    if (!r) {
+        logger_1.logger.warn(dbPath + " is not valid.");
+        process.exit(1);
+    }
+    else {
+        logger_1.logger.info(dbPath + " is valid.");
+    }
 }
 exports.validateDB = validateDB;
 //# sourceMappingURL=validate_db.js.map
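Two things are worth noting here. First, the 0.0.18 file-existence check was dead code: path.join returns a non-empty string, which is always truthy, so the negated condition never fired and missing files were never reported. A one-line demonstration:

```
// path.join never returns a falsy value, so !path.join(...) is always false.
const path = require("path");
console.log(!path.join("/db", "missing.pdf")); // false, even if the file does not exist
```

Second, validateDB changes from returning a boolean to logging the verdict and exiting the process with status 1 on failure, which fits its role as a CLI entry point but is a breaking change for any caller that inspected the return value.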

package.json

@@ -6,3 +6,3 @@ {
     },
-    "version": "0.0.18",
+    "version": "0.0.19",
     "description": "",

@@ -45,3 +45,2 @@ "main": "index.js",
     "@types/pdf-parse": "^1.1.1",
-    "fp-ts": "^2.13.1",
     "jest": "^29.3.1",

@@ -55,2 +54,3 @@ "prettier": "2.8.0",
     "dependencies": {
+        "fp-ts": "^2.13.1",
         "base-64": "^1.0.0",

@@ -57,0 +57,0 @@ "body-parser": "^1.20.1",

README.md

@@ -1,2 +1,2 @@
-# This software is still experimental wait until 1.0.0 for heavy use.
+# This software is still experimental. Please wait until 1.0.0 for heavy use.
 # jendeley

@@ -32,3 +32,3 @@ `jendeley` is a JSON-based PDF paper organizing software.
 - When the title of document includes spaces, the filename should also includes spaces.
-  - `RustHorn CHC-based Verification for Rust Programs [matushita].pdf
+  - `RustHorn CHC-based Verification for Rust Programs [matushita].pdf`
 - If you want to write additional information in the filename, please surround by `[` and `]`.

@@ -66,6 +66,13 @@
 ```
-> cat test_pdfs/db.json | jq '.[].title'
-"MobileNets: Efficient Convolutional Neural Networks for Mobile Vision\n Applications"
-"Deep Residual Learning for Image Recognition"
-"A quantum hydrodynamical description for scrambling and many-body chaos"
+> cat jendeley_db.json | jq '.' | head
+{
+  "jendeley_meta": {
+    "idType": "meta",
+    "version": "0.0.17"
+  },
+  "doi_10.1145/1122445.1122456": {
+    "path": "/A Comprehensive Survey of Neural Architecture Search.pdf",
+    "idType": "doi",
+    "tags": [],
+    "comments": "",
 ```

