perf-marks
Comparing version 1.13.4 to 1.14.0
@@ -10,2 +10,35 @@ # Change Log
+## [1.14.0][] - 2020-09-30
+### Updated
+- Updating package dependencies
+- `end()`: Adding a built-in mark to compare against when the application does not pass one. This gives consumers all the required timing information.
+
+Without passing a mark to compare:
+```js
+import * as PerfMarks from 'perf-marks';
+...
+PerfMarks.start('name-of-your-mark');
+...
+const markResults: PerfMarks.PerfMarksPerformanceEntry = PerfMarks.end('name-of-your-mark');
+```
+
+Passing a mark to compare:
+```js
+import * as PerfMarks from 'perf-marks';
+...
+PerfMarks.start('name-of-your-mark');
+PerfMarks.start('name-of-your-mark-to-be-compared-with');
+...
+const markResults: PerfMarks.PerfMarksPerformanceEntry = PerfMarks.end(
+  'name-of-your-mark',
+  'name-of-your-mark-to-be-compared-with'
+);
+```
 ## [1.13.4][] - 2020-08-21
@@ -363,5 +396,7 @@
 [1.13.3]: https://github.com/willmendesneto/perf-marks/tree/v1.13.3
-[Unreleased]: https://github.com/willmendesneto/perf-marks/compare/v1.13.4...HEAD
+[Unreleased]: https://github.com/willmendesneto/perf-marks/compare/v1.14.0...HEAD
+[1.14.0]: https://github.com/willmendesneto/perf-marks/tree/v1.14.0
 [1.13.4]: https://github.com/willmendesneto/perf-marks/tree/v1.13.4
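For reference, the `PerfMarksPerformanceEntry` shape these calls resolve to can be read off the compiled hunks below; here is a sketch with made-up values (not part of the changelog itself):

```js
const markResults = PerfMarks.end('name-of-your-mark');
// markResults looks like:
// {
//   duration: 123.4,           // milliseconds between the two marks
//   startTime: 5678.9,         // when `start()` was called
//   entryType: 'measure',
//   name: 'name-of-your-mark',
// }
// `end()` resolves to an empty object {} if the mark was never
// started or the measurement throws.
```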
@@ -35,3 +35,3 @@ "use strict"; | ||
// Removes PerformanceObserver references from memory | ||
if (marksObserver[markName]) { | ||
if (!!marksObserver[markName]) { | ||
marksObserver[markName] = undefined; | ||
@@ -85,7 +85,14 @@ } | ||
var startTime = marksMap[markName]; | ||
// NodeJS is not using performance api directly from them for now | ||
if (!is_user_timing_api_supported_1.isUserTimingAPISupported || is_nodejs_env_1.isNodeJSEnv) { | ||
// `performance.measure()` behaves diferently between frontend and | ||
// backend in Javascript applications. Using based on NodeJS docs | ||
performance.measure(markName, markName, markNameToCompare || markName); | ||
if (!is_user_timing_api_supported_1.isUserTimingAPISupported) { | ||
return startTime | ||
? { duration: getTimeNow() - startTime, startTime: startTime, entryType: 'measure', name: markName } | ||
: {}; | ||
} | ||
// If there's no User Timing mark to be compared with, | ||
// the package will create one to be used for better comparison | ||
if (!markNameToCompare) { | ||
performance.mark(markName + "-end"); | ||
} | ||
performance.measure(markName, markName, markNameToCompare || markName + "-end"); | ||
if (is_nodejs_env_1.isNodeJSEnv) { | ||
if (!!marksObserver[markName]) { | ||
@@ -98,3 +105,2 @@ return marksObserver[markName]; | ||
} | ||
performance.measure(markName, markName, markNameToCompare || undefined); | ||
var entry = performance.getEntriesByName(markName).pop(); | ||
@@ -114,5 +120,4 @@ return entry || {}; | ||
// Clear marks used for comparison in case of it's value was passed | ||
if (markNameToCompare) { | ||
clear(markNameToCompare); | ||
} | ||
// If the mark to compare is not passed, it should remove the one we create with `-end` suffix | ||
clear(markNameToCompare || markName + "-end"); | ||
} | ||
@@ -119,0 +124,0 @@ }; |
@@ -30,3 +30,3 @@ import { isUserTimingAPISupported } from './is-user-timing-api-supported';
     // Removes PerformanceObserver references from memory
-    if (marksObserver[markName]) {
+    if (!!marksObserver[markName]) {
         marksObserver[markName] = undefined;
@@ -78,7 +78,14 @@ }
         const startTime = marksMap[markName];
-        // NodeJS is not using performance api directly from them for now
-        if (!isUserTimingAPISupported || isNodeJSEnv) {
-            // `performance.measure()` behaves diferently between frontend and
-            // backend in Javascript applications. Using based on NodeJS docs
-            performance.measure(markName, markName, markNameToCompare || markName);
+        if (!isUserTimingAPISupported) {
+            return startTime
+                ? { duration: getTimeNow() - startTime, startTime, entryType: 'measure', name: markName }
+                : {};
+        }
+        // If there's no User Timing mark to be compared with,
+        // the package will create one to be used for better comparison
+        if (!markNameToCompare) {
+            performance.mark(`${markName}-end`);
+        }
+        performance.measure(markName, markName, markNameToCompare || `${markName}-end`);
+        if (isNodeJSEnv) {
+            if (!!marksObserver[markName]) {
@@ -91,3 +98,2 @@ return marksObserver[markName];
         }
-        performance.measure(markName, markName, markNameToCompare || undefined);
         const entry = performance.getEntriesByName(markName).pop();
@@ -107,5 +113,4 @@ return entry || {};
-        // Clear marks used for comparison in case of it's value was passed
-        if (markNameToCompare) {
-            clear(markNameToCompare);
-        }
+        // If the mark to compare is not passed, it should remove the one we create with `-end` suffix
+        clear(markNameToCompare || `${markName}-end`);
     }
@@ -112,0 +117,0 @@ };
@@ -30,3 +30,3 @@ import { isUserTimingAPISupported } from './is-user-timing-api-supported';
     // Removes PerformanceObserver references from memory
-    if (marksObserver[markName]) {
+    if (!!marksObserver[markName]) {
         marksObserver[markName] = undefined;
@@ -78,7 +78,14 @@ }
         var startTime = marksMap[markName];
-        // NodeJS is not using performance api directly from them for now
-        if (!isUserTimingAPISupported || isNodeJSEnv) {
-            // `performance.measure()` behaves diferently between frontend and
-            // backend in Javascript applications. Using based on NodeJS docs
-            performance.measure(markName, markName, markNameToCompare || markName);
+        if (!isUserTimingAPISupported) {
+            return startTime
+                ? { duration: getTimeNow() - startTime, startTime: startTime, entryType: 'measure', name: markName }
+                : {};
+        }
+        // If there's no User Timing mark to be compared with,
+        // the package will create one to be used for better comparison
+        if (!markNameToCompare) {
+            performance.mark(markName + "-end");
+        }
+        performance.measure(markName, markName, markNameToCompare || markName + "-end");
+        if (isNodeJSEnv) {
+            if (!!marksObserver[markName]) {
@@ -91,3 +98,2 @@ return marksObserver[markName];
         }
-        performance.measure(markName, markName, markNameToCompare || undefined);
         var entry = performance.getEntriesByName(markName).pop();
@@ -107,5 +113,4 @@ return entry || {};
-        // Clear marks used for comparison in case of it's value was passed
-        if (markNameToCompare) {
-            clear(markNameToCompare);
-        }
+        // If the mark to compare is not passed, it should remove the one we create with `-end` suffix
+        clear(markNameToCompare || markName + "-end");
     }
@@ -112,0 +117,0 @@ };
@@ -68,3 +68,3 @@ (function (global, factory) {
     // Removes PerformanceObserver references from memory
-    if (marksObserver[markName]) {
+    if (!!marksObserver[markName]) {
         marksObserver[markName] = undefined;
@@ -116,7 +116,14 @@ }
         var startTime = marksMap[markName];
-        // NodeJS is not using performance api directly from them for now
-        if (!isUserTimingAPISupported || isNodeJSEnv) {
-            // `performance.measure()` behaves diferently between frontend and
-            // backend in Javascript applications. Using based on NodeJS docs
-            performance.measure(markName, markName, markNameToCompare || markName);
+        if (!isUserTimingAPISupported) {
+            return startTime
+                ? { duration: getTimeNow() - startTime, startTime: startTime, entryType: 'measure', name: markName }
+                : {};
+        }
+        // If there's no User Timing mark to be compared with,
+        // the package will create one to be used for better comparison
+        if (!markNameToCompare) {
+            performance.mark(markName + "-end");
+        }
+        performance.measure(markName, markName, markNameToCompare || markName + "-end");
+        if (isNodeJSEnv) {
+            if (!!marksObserver[markName]) {
@@ -129,3 +136,2 @@ return marksObserver[markName];
         }
-        performance.measure(markName, markName, markNameToCompare || undefined);
         var entry = performance.getEntriesByName(markName).pop();
@@ -145,5 +151,4 @@ return entry || {};
-        // Clear marks used for comparison in case of it's value was passed
-        if (markNameToCompare) {
-            clear(markNameToCompare);
-        }
+        // If the mark to compare is not passed, it should remove the one we create with `-end` suffix
+        clear(markNameToCompare || markName + "-end");
     }
@@ -150,0 +155,0 @@ };
@@ -1 +1 @@
-!function(e,r){"object"==typeof exports&&"undefined"!=typeof module?r(exports):"function"==typeof define&&define.amd?define(["exports"],r):r((e="undefined"!=typeof globalThis?globalThis:e||self).PerfMarks={})}(this,function(e){"use strict";function t(){return f?performance.now():Date.now()}function o(e){s[e]=void 0,p[e]&&(p[e]=void 0),f&&(a||performance.clearMeasures(e),performance.clearMarks(e))}function c(r){var n;f&&(a&&u&&(n=new PerformanceObserver(function(e){p[r]=e.getEntries().find(function(e){return e.name===r}),n.disconnect()})).observe({entryTypes:["measure"]}),performance.mark(r)),s[r]=t()}function i(e,r){try{var n=s[e];return!f||a?(performance.measure(e,e,r||e),p[e]?p[e]:n?{duration:t()-n,startTime:n,entryType:"measure",name:e}:{}):(performance.measure(e,e,r||void 0),performance.getEntriesByName(e).pop()||{})}catch(e){return{}}finally{o(e),r&&o(r)}}function r(e){return f&&!a&&performance.getEntriesByType(e)||[]}var a="[object process]"===Object.prototype.toString.call("undefined"!=typeof process?process:0),f="undefined"!=typeof performance&&void 0!==performance.now&&"function"==typeof performance.mark&&"function"==typeof performance.measure&&("function"==typeof performance.clearMarks||"function"==typeof performance.clearMeasures),u="undefined"!=typeof PerformanceObserver&&void 0!==PerformanceObserver.prototype&&"function"==typeof PerformanceObserver.prototype.constructor,s={},p={};function n(e,c,i,f){return new(i=i||Promise)(function(n,r){function t(e){try{a(f.next(e))}catch(e){r(e)}}function o(e){try{a(f.throw(e))}catch(e){r(e)}}function a(e){var r;e.done?n(e.value):((r=e.value)instanceof i?r:new i(function(e){e(r)})).then(t,o)}a((f=f.apply(e,c||[])).next())})}function l(n,t){var o,a,c,i={label:0,sent:function(){if(1&c[0])throw c[1];return c[1]},trys:[],ops:[]},e={next:r(0),throw:r(1),return:r(2)};return"function"==typeof Symbol&&(e[Symbol.iterator]=function(){return this}),e;function r(r){return function(e){return function(r){if(o)throw new TypeError("Generator is already executing.");for(;i;)try{if(o=1,a&&(c=2&r[0]?a.return:r[0]?a.throw||((c=a.return)&&c.call(a),0):a.next)&&!(c=c.call(a,r[1])).done)return c;switch(a=0,c&&(r=[2&r[0],c.value]),r[0]){case 0:case 1:c=r;break;case 4:return i.label++,{value:r[1],done:!1};case 5:i.label++,a=r[1],r=[0];continue;case 7:r=i.ops.pop(),i.trys.pop();continue;default:if(!(c=0<(c=i.trys).length&&c[c.length-1])&&(6===r[0]||2===r[0])){i=0;continue}if(3===r[0]&&(!c||r[1]>c[0]&&r[1]<c[3])){i.label=r[1];break}if(6===r[0]&&i.label<c[1]){i.label=c[1],c=r;break}if(c&&i.label<c[2]){i.label=c[2],i.ops.push(r);break}c[2]&&i.ops.pop(),i.trys.pop();continue}r=t.call(n,i)}catch(e){r=[6,e],a=0}finally{o=c=0}if(5&r[0])throw r[1];return{value:r[0]?r[1]:void 0,done:!0}}([r,e])}}}e.clear=o,e.clearAll=function(){s={},p={},f&&(a||performance.clearMeasures(),performance.clearMarks())},e.end=i,e.getEntriesByType=r,e.getNavigationMarker=function(){return r("navigation").pop()||{}},e.isNodeJSEnv=a,e.isPerformanceObservableSupported=u,e.isUserTimingAPISupported=f,e.profiler=function(o,a){return n(void 0,void 0,void 0,function(){var r,n,t;return l(this,function(e){switch(e.label){case 0:c(a),e.label=1;case 1:return e.trys.push([1,3,,4]),[4,o()];case 2:return r=e.sent(),n=i(a),[2,r?Object.assign({},{data:r,mark:n}):{mark:n}];case 3:throw t=e.sent(),i(a),t;case 4:return[2]}})})},e.start=c,Object.defineProperty(e,"__esModule",{value:!0})});
+!function(e,r){"object"==typeof exports&&"undefined"!=typeof module?r(exports):"function"==typeof define&&define.amd?define(["exports"],r):r((e="undefined"!=typeof globalThis?globalThis:e||self).PerfMarks={})}(this,function(e){"use strict";function t(){return(f?performance:Date).now()}function o(e){s[e]=void 0,p[e]&&(p[e]=void 0),f&&(i||performance.clearMeasures(e),performance.clearMarks(e))}function a(r){var n;f&&(i&&u&&(n=new PerformanceObserver(function(e){p[r]=e.getEntries().find(function(e){return e.name===r}),n.disconnect()})).observe({entryTypes:["measure"]}),performance.mark(r)),s[r]=t()}function c(e,r){try{var n=s[e];return f?(r||performance.mark(e+"-end"),performance.measure(e,e,r||e+"-end"),i?p[e]?p[e]:n?{duration:t()-n,startTime:n,entryType:"measure",name:e}:{}:performance.getEntriesByName(e).pop()||{}):n?{duration:t()-n,startTime:n,entryType:"measure",name:e}:{}}catch(e){return{}}finally{o(e),o(r||e+"-end")}}function r(e){return f&&!i&&performance.getEntriesByType(e)||[]}var i="[object process]"===Object.prototype.toString.call("undefined"!=typeof process?process:0),f="undefined"!=typeof performance&&void 0!==performance.now&&"function"==typeof performance.mark&&"function"==typeof performance.measure&&("function"==typeof performance.clearMarks||"function"==typeof performance.clearMeasures),u="undefined"!=typeof PerformanceObserver&&void 0!==PerformanceObserver.prototype&&"function"==typeof PerformanceObserver.prototype.constructor,s={},p={};function n(e,c,i,f){return new(i=i||Promise)(function(n,r){function t(e){try{a(f.next(e))}catch(e){r(e)}}function o(e){try{a(f.throw(e))}catch(e){r(e)}}function a(e){var r;e.done?n(e.value):((r=e.value)instanceof i?r:new i(function(e){e(r)})).then(t,o)}a((f=f.apply(e,c||[])).next())})}function l(n,t){var o,a,c,i={label:0,sent:function(){if(1&c[0])throw c[1];return c[1]},trys:[],ops:[]},e={next:r(0),throw:r(1),return:r(2)};return"function"==typeof Symbol&&(e[Symbol.iterator]=function(){return this}),e;function r(r){return function(e){return function(r){if(o)throw new TypeError("Generator is already executing.");for(;i;)try{if(o=1,a&&(c=2&r[0]?a.return:r[0]?a.throw||((c=a.return)&&c.call(a),0):a.next)&&!(c=c.call(a,r[1])).done)return c;switch(a=0,c&&(r=[2&r[0],c.value]),r[0]){case 0:case 1:c=r;break;case 4:return i.label++,{value:r[1],done:!1};case 5:i.label++,a=r[1],r=[0];continue;case 7:r=i.ops.pop(),i.trys.pop();continue;default:if(!(c=0<(c=i.trys).length&&c[c.length-1])&&(6===r[0]||2===r[0])){i=0;continue}if(3===r[0]&&(!c||r[1]>c[0]&&r[1]<c[3])){i.label=r[1];break}if(6===r[0]&&i.label<c[1]){i.label=c[1],c=r;break}if(c&&i.label<c[2]){i.label=c[2],i.ops.push(r);break}c[2]&&i.ops.pop(),i.trys.pop();continue}r=t.call(n,i)}catch(e){r=[6,e],a=0}finally{o=c=0}if(5&r[0])throw r[1];return{value:r[0]?r[1]:void 0,done:!0}}([r,e])}}}e.clear=o,e.clearAll=function(){s={},p={},f&&(i||performance.clearMeasures(),performance.clearMarks())},e.end=c,e.getEntriesByType=r,e.getNavigationMarker=function(){return r("navigation").pop()||{}},e.isNodeJSEnv=i,e.isPerformanceObservableSupported=u,e.isUserTimingAPISupported=f,e.profiler=function(t,o){return n(void 0,void 0,void 0,function(){var r,n;return l(this,function(e){switch(e.label){case 0:a(o),e.label=1;case 1:return e.trys.push([1,3,,4]),[4,t()];case 2:return r=e.sent(),n=c(o),[2,r?Object.assign({},{data:r,mark:n}):{mark:n}];case 3:throw n=e.sent(),c(o),n;case 4:return[2]}})})},e.start=a,Object.defineProperty(e,"__esModule",{value:!0})});
 {
   "name": "perf-marks",
-  "version": "1.13.4",
+  "version": "1.14.0",
   "author": "Will Mendes <willmendesneto@gmail.com>",
@@ -31,6 +31,6 @@ "description": "The simplest and lightweight solution for User Timing API in Javascript.",
   "devDependencies": {
-    "@types/jest": "^26.0.9",
-    "@types/node": "^14.0.27",
-    "@typescript-eslint/eslint-plugin": "^3.8.0",
-    "@typescript-eslint/parser": "^3.8.0",
+    "@types/jest": "^26.0.14",
+    "@types/node": "^14.11.2",
+    "@typescript-eslint/eslint-plugin": "^4.3.0",
+    "@typescript-eslint/parser": "^4.3.0",
     "bundlesize": "^0.18.0",
@@ -40,19 +40,19 @@ "changelog-verify": "^1.1.0",
     "depcheck": "^1.2.0",
-    "eslint": "^7.6.0",
-    "eslint-config-prettier": "^6.11.0",
+    "eslint": "^7.10.0",
+    "eslint-config-prettier": "^6.12.0",
     "eslint-plugin-compat": "^3.3.0",
     "eslint-plugin-prettier": "^3.1.3",
-    "husky": "^4.2.5",
-    "jest": "^26.2.2",
-    "jsdom": "^16.2.2",
-    "lint-staged": "^10.2.2",
+    "husky": "^4.3.0",
+    "jest": "^26.4.2",
+    "jsdom": "^16.4.0",
+    "lint-staged": "^10.4.0",
     "npm-run-all": "^4.1.5",
-    "prettier": "^2.0.5",
-    "rollup": "^2.9.1",
+    "prettier": "^2.1.2",
+    "rollup": "^2.28.2",
     "rollup-plugin-node-resolve": "^5.2.0",
-    "ts-jest": "^26.1.4",
-    "ts-node": "^8.10.1",
-    "typescript": "^3.9.2",
+    "ts-jest": "^26.4.1",
+    "ts-node": "^9.0.0",
+    "typescript": "^4.0.3",
     "typings": "^2.1.1",
-    "uglify-js": "^3.10.1",
+    "uglify-js": "^3.11.0",
     "usertiming": "^0.1.8",
@@ -59,0 +59,0 @@ "version-changelog": "^3.1.0"
@@ -114,3 +114,3 @@ # Perf-marks
-### `PerfMarks.end(markName)`
+### `PerfMarks.end(markName, markNameToCompare)`
@@ -121,2 +121,6 @@ Returns the results for the specified marker.
+If a `markNameToCompare` value is not passed, the package will create a mark named `markName + '-end'` and compare against that. Otherwise, it will compare based on the given mark.
+> If you're passing a `markNameToCompare` value, make sure you've also started a mark with the same name beforehand.
 ```js
@@ -123,0 +127,0 @@ import * as PerfMarks from 'perf-marks';
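The README code block is truncated in this diff; here is a usage sketch of the two-argument form, with made-up mark names, consistent with the changelog examples above:

```js
import * as PerfMarks from 'perf-marks';

PerfMarks.start('app-bootstrap');
PerfMarks.start('first-render');
// ... application code runs ...

// Measures from the 'app-bootstrap' mark to the 'first-render' mark.
// With the second argument omitted, the package would instead create
// and measure against an 'app-bootstrap-end' mark automatically.
const markResults = PerfMarks.end('app-bootstrap', 'first-render');
```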