From 376a8c4f6ba06b820b5a69a29f014a9943503fcd Mon Sep 17 00:00:00 2001 From: Yagiz Nizipli Date: Mon, 5 Sep 2022 12:56:57 -0400 Subject: [PATCH 1/7] feat: bump typescript to 4.8.2 (#1550) * feat: bump typescript to 4.8.2 * feat: bump tsd and ts-node --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 4b2039a5e..ec9d8e376 100644 --- a/package.json +++ b/package.json @@ -103,9 +103,9 @@ "tap": "^16.2.0", "tape": "^5.5.3", "through2": "^4.0.0", - "ts-node": "^10.8.1", - "tsd": "^0.22.0", - "typescript": "^4.7.3", + "ts-node": "^10.9.1", + "tsd": "^0.23.0", + "typescript": "^4.8.2", "winston": "^3.7.2" }, "dependencies": { From dc4e8546c0a4bedb087cbcccd9a48b56100b0ad7 Mon Sep 17 00:00:00 2001 From: Tommy Dew Date: Tue, 6 Sep 2022 01:58:05 +0800 Subject: [PATCH 2/7] Document how error is received from worker transport (#1548) * document how error is received from worker transport * add a link to nodejs worker thread doc * improve doc based on review * ops, missed a few places * Update docs/transports.md Co-authored-by: James Sumners * add a line between sentences Co-authored-by: James Sumners --- docs/transports.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/docs/transports.md b/docs/transports.md index 52ea4b9b6..075e0ecf1 100644 --- a/docs/transports.md +++ b/docs/transports.md @@ -887,3 +887,32 @@ $ node app.js | pino-websocket -a my-websocket-server.example.com -p 3004 For full documentation of command line switches read the [README](https://github.com/abeai/pino-websocket#readme). [pino-pretty]: https://github.com/pinojs/pino-pretty + + +## Communication between Pino and Transports +Here we discuss some technical details of how Pino communicates with its [worker threads](https://nodejs.org/api/worker_threads.html). + +Pino uses [`thread-stream`](https://github.com/pinojs/thread-stream) to create a stream for transports. 
+When we create a stream with `thread-stream`, `thread-stream` spawns a [worker](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/index.js#L50-L60) (an independent JavaScript execution thread). + + +### Error messages +How are error messages propagated from a transport worker to Pino? + +Let's assume we have a transport with an error listener: +```js +// index.js +const transport = pino.transport({ + target: './transport.js' +}) + +transport.on('error', err => { + console.error('error caught', err) +}) + +const log = pino(transport) +``` + +When our worker emits an error event, the worker has listeners for it: [error](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/lib/worker.js#L59-L70) and [unhandledRejection](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/lib/worker.js#L135-L141). These listeners send the error message to the main thread where Pino is present. + +When Pino receives the error message, it further [emits](https://github.com/pinojs/thread-stream/blob/f19ac8dbd602837d2851e17fbc7dfc5bbc51083f/index.js#L349) the error message. Finally, the error message arrives at our `index.js` and is caught by our error listener. From 44fe7d1c633f801f6eb39b8fea087357f63b0b3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Sep 2022 15:17:33 +0000 Subject: [PATCH 3/7] build(deps-dev): bump jest from 28.1.3 to 29.0.3 (#1553) Bumps [jest](https://github.com/facebook/jest/tree/HEAD/packages/jest) from 28.1.3 to 29.0.3. - [Release notes](https://github.com/facebook/jest/releases) - [Changelog](https://github.com/facebook/jest/blob/main/CHANGELOG.md) - [Commits](https://github.com/facebook/jest/commits/v29.0.3/packages/jest) --- updated-dependencies: - dependency-name: jest dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ec9d8e376..b05d2caf4 100644 --- a/package.json +++ b/package.json @@ -88,7 +88,7 @@ "fastbench": "^1.0.1", "flush-write-stream": "^2.0.0", "import-fresh": "^3.2.1", - "jest": "^28.1.0", + "jest": "^29.0.3", "log": "^6.0.0", "loglevel": "^1.6.7", "pino-pretty": "^9.0.0", From a7695391125284fb2fe8b65e6a62e4051ff36bbe Mon Sep 17 00:00:00 2001 From: Simen Bekkhus Date: Fri, 16 Sep 2022 11:36:03 +0200 Subject: [PATCH 4/7] fix: add `this` to `hooks.logMethod` (#1559) --- pino.d.ts | 2 +- test/types/pino.test-d.ts | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pino.d.ts b/pino.d.ts index 8e7fe6ea9..e0f7466e7 100644 --- a/pino.d.ts +++ b/pino.d.ts @@ -600,7 +600,7 @@ declare namespace pino { * log method and method is the log method itself, and level is the log level. This hook must invoke the method function by * using apply, like so: method.apply(this, newArgumentsArray). 
*/ - logMethod?: (args: any[], method: LogFn, level: number) => void; + logMethod?: (this: Logger, args: any[], method: LogFn, level: number) => void; }; /** diff --git a/test/types/pino.test-d.ts b/test/types/pino.test-d.ts index 57b8ca43c..e4ef8c885 100644 --- a/test/types/pino.test-d.ts +++ b/test/types/pino.test-d.ts @@ -1,7 +1,7 @@ import P, { pino } from "../../"; import { IncomingMessage, ServerResponse } from "http"; import { Socket } from "net"; -import { expectError } from 'tsd' +import { expectError, expectType } from 'tsd' import Logger = P.Logger; const log = pino(); @@ -229,6 +229,7 @@ const withNestedKey = pino({ const withHooks = pino({ hooks: { logMethod(args, method, level) { + expectType(this); return method.apply(this, ['msg', ...args]); }, }, @@ -344,4 +345,4 @@ cclog3.childLevel2('') const withChildCallback = pino({ onChild: (child: Logger) => {} }) -withChildCallback.onChild = (child: Logger) => {} \ No newline at end of file +withChildCallback.onChild = (child: Logger) => {} From 73678ffd5b17b5e67cba824c53442ee889f4ab06 Mon Sep 17 00:00:00 2001 From: Frazer Smith Date: Fri, 16 Sep 2022 10:48:44 +0100 Subject: [PATCH 5/7] docs: grammar and spelling fixes (#1560) * docs: grammar and spelling fixes * docs: more fixes --- README.md | 14 ++++++------ docs/api.md | 44 ++++++++++++++++++------------------ docs/asynchronous.md | 12 +++++----- docs/browser.md | 20 ++++++++--------- docs/bundling.md | 8 +++---- docs/child-loggers.md | 8 +++---- docs/ecosystem.md | 6 ++--- docs/help.md | 18 +++++++-------- docs/pretty.md | 10 ++++----- docs/redaction.md | 8 +++---- docs/transports.md | 52 +++++++++++++++++++++---------------------- docs/web.md | 6 ++--- 12 files changed, 103 insertions(+), 103 deletions(-) diff --git a/README.md b/README.md index 3a546c669..8ce69b1fa 100644 --- a/README.md +++ b/README.md @@ -72,10 +72,10 @@ format logs during development: ### Transports & Log Processing Due to Node's single-threaded event-loop, it's highly recommended 
that sending, -alert triggering, reformatting and all forms of log processing -be conducted in a separate process or thread. +alert triggering, reformatting, and all forms of log processing +are conducted in a separate process or thread. -In Pino terminology we call all log processors "transports", and recommend that the +In Pino terminology, we call all log processors "transports" and recommend that the transports be run in a worker thread using our `pino.transport` API. For more details see our [Transports⇗](docs/transports.md) document. @@ -92,9 +92,9 @@ See the [Benchmarks](docs/benchmarks.md) document for comparisons. ### Bundling support -Pino supports to being bundled using tools like webpack or esbuild. +Pino supports being bundled using tools like webpack or esbuild. -See [Bundling](docs/bundling.md) document for more informations. +See [Bundling](docs/bundling.md) document for more information. ## The Team @@ -139,8 +139,8 @@ Pino is an **OPEN Open Source Project**. This means that: See the [CONTRIBUTING.md](https://github.com/pinojs/pino/blob/master/CONTRIBUTING.md) file for more details. - -## Acknowledgements + +## Acknowledgments This project was kindly sponsored by [nearForm](https://nearform.com). diff --git a/docs/api.md b/docs/api.md index 8e0004c39..cc156414f 100644 --- a/docs/api.md +++ b/docs/api.md @@ -40,7 +40,7 @@ ## `pino([options], [destination]) => logger` The exported `pino` function takes two optional arguments, -[`options`](#options) and [`destination`](#destination) and +[`options`](#options) and [`destination`](#destination), and returns a [logger instance](#logger). @@ -70,7 +70,7 @@ Additional levels can be added to the instance via the `customLevels` option. Default: `undefined` Use this option to define additional logging levels. -The keys of the object correspond the namespace of the log level, +The keys of the object correspond to the namespace of the log level, and the values should be the numerical value of the level. 
```js @@ -88,7 +88,7 @@ logger.foo('hi') Default: `false` Use this option to only use defined `customLevels` and omit Pino's levels. -Logger's default `level` must be changed to a value in `customLevels` in order to use `useOnlyCustomLevels` +Logger's default `level` must be changed to a value in `customLevels` to use `useOnlyCustomLevels` Warning: this option may not be supported by downstream transports. ```js @@ -100,13 +100,13 @@ const logger = pino({ level: 'foo' }) logger.foo('hi') -logger.info('hello') // Will throw an error saying info in not found in logger object +logger.info('hello') // Will throw an error saying info is not found in logger object ``` #### `depthLimit` (Number) Default: `5` -Option to limit stringification at a specific nesting depth when logging circular object. +Option to limit stringification at a specific nesting depth when logging circular objects. #### `edgeLimit` (Number) @@ -266,11 +266,11 @@ Default: `undefined` As an array, the `redact` option specifies paths that should have their values redacted from any log output. -Each path must be a string using a syntax which corresponds to JavaScript dot and bracket notation. +Each path must be a string using a syntax that corresponds to JavaScript dot and bracket notation. If an object is supplied, three options can be specified: * `paths` (array): Required. An array of paths. See [redaction - Path Syntax ⇗](/docs/redaction.md#paths) for specifics. - * `censor` (String|Function|Undefined): Optional. When supplied as a String the `censor` option will overwrite keys which are to be redacted. When set to `undefined` the key will be removed entirely from the object. + * `censor` (String|Function|Undefined): Optional. When supplied as a String the `censor` option will overwrite keys that are to be redacted. When set to `undefined` the key will be removed entirely from the object. The `censor` option may also be a mapping function. 
The (synchronous) mapping function has the signature `(value, path) => redactedValue` and is called with the unredacted `value` and `path` to the key being redacted, as an array. For example given a redaction path of `a.b.c` the `path` argument would be `['a', 'b', 'c']`. The value returned from the mapping function becomes the applied censor value. Default: `'[Redacted]'` value synchronously. Default: `'[Redacted]'` @@ -356,7 +356,7 @@ const formatters = { Changes the shape of the log object. This function will be called every time one of the log methods (such as `.info`) is called. All arguments passed to the -log method, except the message, will be pass to this function. By default it does +log method, except the message, will be passed to this function. By default, it does not change the shape of the log object. ```js @@ -503,7 +503,7 @@ pino({ transport: {}}, '/path/to/somewhere') // THIS WILL NOT WORK, DO NOT DO TH pino({ transport: {}}, process.stderr) // THIS WILL NOT WORK, DO NOT DO THIS ``` -when using the `transport` option. In this case an `Error` will be thrown. +when using the `transport` option. In this case, an `Error` will be thrown. * See [pino.transport()](#pino-transport) @@ -513,7 +513,7 @@ The `onChild` function is a synchronous callback that will be called on each cre Any error thrown inside the callback will be uncaught and should be handled inside the callback. ```js const parent = require('pino')({ onChild: (instance) => { - // Exceute call back code for each newly created child. + // Execute call back code for each newly created child. }}) // `onChild` will now be executed with the new child. parent.child(bindings) @@ -567,7 +567,7 @@ path, e.g. `/tmp/1`. 
Default: `false` Using the global symbol `Symbol.for('pino.metadata')` as a key on the `destination` parameter and -setting the key it to `true`, indicates that the following properties should be +setting the key to `true`, indicates that the following properties should be set on the `destination` object after each log line is written: * the last logging level as `destination.lastLevel` @@ -613,7 +613,7 @@ The parameters are explained below using the `logger.info` method but the same a #### `mergingObject` (Object) An object can optionally be supplied as the first parameter. Each enumerable key and value -of the `mergingObject` is copied in to the JSON log line. +of the `mergingObject` is copied into the JSON log line. ```js logger.info({MIX: {IN: true}}) @@ -658,7 +658,7 @@ the following placeholders: * `%s` – string placeholder * `%d` – digit placeholder -* `%O`, `%o` and `%j` – object placeholder +* `%O`, `%o`, and `%j` – object placeholder Values supplied as additional arguments to the logger method will then be interpolated accordingly. @@ -776,7 +776,7 @@ Write a `'error'` level log, if the configured `level` allows for it. Write a `'fatal'` level log, if the configured `level` allows for it. -Since `'fatal'` level messages are intended to be logged just prior to the process exiting the `fatal` +Since `'fatal'` level messages are intended to be logged just before the process exiting the `fatal` method will always sync flush the destination. Therefore it's important not to misuse `fatal` since it will cause performance overhead if used for any @@ -832,7 +832,7 @@ Options for child logger. These options will override the parent logger options. ##### `options.level` (String) The `level` property overrides the log level of the child logger. -By default the parent log level is inherited. +By default, the parent log level is inherited. After the creation of the child logger, it is also accessible using the [`logger.level`](#logger-level) key. 
```js @@ -921,9 +921,9 @@ The core levels and their values are as follows: The logging level is a *minimum* level based on the associated value of that level. -For instance if `logger.level` is `info` *(30)* then `info` *(30)*, `warn` *(40)*, `error` *(50)* and `fatal` *(60)* log methods will be enabled but the `trace` *(10)* and `debug` *(20)* methods, being less than 30, will not. +For instance if `logger.level` is `info` *(30)* then `info` *(30)*, `warn` *(40)*, `error` *(50)*, and `fatal` *(60)* log methods will be enabled but the `trace` *(10)* and `debug` *(20)* methods, being less than 30, will not. -The `silent` logging level is a specialized level which will disable all logging, +The `silent` logging level is a specialized level that will disable all logging, the `silent` log method is a noop function. @@ -994,7 +994,7 @@ $ node -p "require('pino')().levels" ### logger\[Symbol.for('pino.serializers')\] Returns the serializers as applied to the current logger instance. If a child logger did not -register it's own serializer upon instantiation the serializers of the parent will be returned. +register its own serializer upon instantiation the serializers of the parent will be returned. ### Event: 'level-change' @@ -1079,7 +1079,7 @@ A `pino.destination` instance can also be used to reopen closed files ### `pino.transport(options) => ThreadStream` -Create a a stream that routes logs to a worker thread that +Create a stream that routes logs to a worker thread that wraps around a [Pino Transport](/docs/transports.md). ```js @@ -1122,7 +1122,7 @@ const transport = pino.transport({ pino(transport) ``` -If `WeakRef`, `WeakMap` and `FinalizationRegistry` are available in the current runtime (v14.5.0+), then the thread +If `WeakRef`, `WeakMap`, and `FinalizationRegistry` are available in the current runtime (v14.5.0+), then the thread will be automatically terminated in case the stream or logger goes out of scope. 
The `transport()` function adds a listener to `process.on('beforeExit')` and `process.on('exit')` to ensure the worker is flushed and all data synced before the process exits. @@ -1242,7 +1242,7 @@ The `pino.stdSerializers` object provides functions for serializing objects comm ### `pino.stdTimeFunctions` (Object) -The [`timestamp`](#opt-timestamp) option can accept a function which determines the +The [`timestamp`](#opt-timestamp) option can accept a function that determines the `timestamp` value in a log line. The `pino.stdTimeFunctions` object provides a very small set of common functions for generating the @@ -1258,7 +1258,7 @@ The `pino.stdTimeFunctions` object provides a very small set of common functions ### `pino.symbols` (Object) -For integration purposes with ecosystem and third party libraries `pino.symbols` +For integration purposes with ecosystem and third-party libraries `pino.symbols` exposes the symbols used to hold non-public state and methods on the logger instance. Access to the symbols allows logger state to be adjusted, and methods to be overridden or diff --git a/docs/asynchronous.md b/docs/asynchronous.md index 487ebf81c..242199280 100644 --- a/docs/asynchronous.md +++ b/docs/asynchronous.md @@ -1,6 +1,6 @@ # Asynchronous Logging -In essence, asynchronous logging enables the minimum overhead of Pino. +Asynchronous logging enables the minimum overhead of Pino. Asynchronous logging works by buffering log messages and writing them in larger chunks. ```js @@ -13,16 +13,16 @@ const logger = pino(pino.destination({ ``` It's always possible to turn on synchronous logging by passing `sync: true`. -In this mode of operation log messages are directly written to the -output stream, as the messages are generated with a _blocking_ operation. +In this mode of operation, log messages are directly written to the +output stream as the messages are generated with a _blocking_ operation. 
* See [`pino.destination`](/docs/api.md#pino-destination) * `pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom). ### AWS Lambda -Asynchronous logging is disabled by default on AWS Lambda, or any other environment -that modifies `process.stdout`. If forcefully turned on, we recommend to call `dest.flushSync()` at the end +Asynchronous logging is disabled by default on AWS Lambda or any other environment +that modifies `process.stdout`. If forcefully turned on, we recommend calling `dest.flushSync()` at the end of each function execution to avoid losing data. ## Caveats @@ -36,5 +36,5 @@ Asynchronous logging has a couple of important caveats: See also: -* [`pino.destination` api](/docs/api.md#pino-destination) +* [`pino.destination` API](/docs/api.md#pino-destination) * [`destination` parameter](/docs/api.md#destination) diff --git a/docs/browser.md b/docs/browser.md index 0bfa42d11..394de875b 100644 --- a/docs/browser.md +++ b/docs/browser.md @@ -1,6 +1,6 @@ # Browser API -Pino is compatible with [`browserify`](https://npm.im/browserify) for browser side usage: +Pino is compatible with [`browserify`](https://npm.im/browserify) for browser-side usage: This can be useful with isomorphic/universal JavaScript code. @@ -101,7 +101,7 @@ pino.info({custom: 'a', another: 'b'}) ``` When `serialize` is `true` the standard error serializer is also enabled (see https://github.com/pinojs/pino/blob/master/docs/api.md#stdSerializers). -This is a global serializer which will apply to any `Error` objects passed to the logger methods. +This is a global serializer, which will apply to any `Error` objects passed to the logger methods. If `serialize` is an array the standard error serializer is also automatically enabled, it can be explicitly disabled by including a string in the serialize array: `!stdSerializers.err`, like so: @@ -141,7 +141,7 @@ message and a `logEvent` object. 
The `logEvent` object is a data structure representing a log message, it represents the arguments passed to a logger statement, the level -at which they were logged and the hierarchy of child bindings. +at which they were logged, and the hierarchy of child bindings. The `logEvent` format is structured like so: @@ -154,25 +154,25 @@ The `logEvent` format is structured like so: } ``` -The `ts` property is a unix epoch timestamp in milliseconds, the time is taken from the moment the +The `ts` property is a Unix epoch timestamp in milliseconds, the time is taken from the moment the logger method is called. The `messages` array is all arguments passed to logger method, (for instance `logger.info('a', 'b', 'c')` would result in `messages` array `['a', 'b', 'c']`). The `bindings` array represents each child logger (if any), and the relevant bindings. -For instance given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array +For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array would hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings` are ordered according to their position in the child logger hierarchy, with the lowest index being the top of the hierarchy. -By default serializers are not applied to log output in the browser, but they will *always* be +By default, serializers are not applied to log output in the browser, but they will *always* be applied to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent format for all values between server and client. The `level` holds the label (for instance `info`), and the corresponding numerical value -(for instance `30`). This could be important in cases where client side level values and -labels differ from server side. +(for instance `30`). This could be important in cases where client-side level values and +labels differ from server-side. 
The point of the `send` function is to remotely record log messages: @@ -184,7 +184,7 @@ const pino = require('pino')({ send: function (level, logEvent) { if (level === 'warn') { // maybe send the logEvent to a separate endpoint - // or maybe analyse the messages further before sending + // or maybe analyze the messages further before sending } // we could also use the `logEvent.level.value` property to determine // numerical value @@ -205,4 +205,4 @@ const pino = require('pino')({browser: {disabled: true}}) ``` The `disabled` option will disable logging in browser if set -to `true`. Default is set to `false`. +to `true`, by default it is set to `false`. diff --git a/docs/bundling.md b/docs/bundling.md index 34bddce0b..6467b8e52 100644 --- a/docs/bundling.md +++ b/docs/bundling.md @@ -2,7 +2,7 @@ Due to its internal architecture based on Worker Threads, it is not possible to bundle Pino *without* generating additional files. -In particular, a bundler must ensure that the following files are also bundle separately: +In particular, a bundler must ensure that the following files are also bundled separately: * `lib/worker.js` from the `thread-stream` dependency * `file.js` @@ -10,9 +10,9 @@ In particular, a bundler must ensure that the following files are also bundle se * `lib/worker-pipeline.js` * Any transport used by the user (like `pino-pretty`) -Once the files above have been generated, the bundler must also add information about the files above by injecting a code which sets `__bundlerPathsOverrides` in the `globalThis` object. +Once the files above have been generated, the bundler must also add information about the files above by injecting a code that sets `__bundlerPathsOverrides` in the `globalThis` object. -The variable is a object whose keys are identifier for the files and the values are the paths of files relative to the currently bundle files. 
+The variable is an object whose keys are an identifier for the files and the values are the paths of files relative to the currently bundle files. Example: @@ -27,7 +27,7 @@ globalThis.__bundlerPathsOverrides = { }; ``` -Note that `pino/file`, `pino-worker`, `pino-pipeline-worker` and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration. +Note that `pino/file`, `pino-worker`, `pino-pipeline-worker`, and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration. ## Webpack Plugin diff --git a/docs/child-loggers.md b/docs/child-loggers.md index de9163f02..6890e8419 100644 --- a/docs/child-loggers.md +++ b/docs/child-loggers.md @@ -50,7 +50,7 @@ benchPinoExtremeChildChild*10000: 127.753ms ## Duplicate keys caveat -It's possible for naming conflicts to arise between child loggers and +Naming conflicts can arise between child loggers and children of child loggers. This isn't as bad as it sounds, even if the same keys between @@ -71,10 +71,10 @@ $ cat my-log {"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":1459534114473,"a":"property","a":"prop"} ``` -Notice how there's two key's named `a` in the JSON output. The sub-childs properties +Notice how there are two keys named `a` in the JSON output. The sub-childs properties appear after the parent child properties. -At some point the logs will most likely be processed (for instance with a [transport](transports.md)), +At some point, the logs will most likely be processed (for instance with a [transport](transports.md)), and this generally involves parsing. `JSON.parse` will return an object where the conflicting namespace holds the final value assigned to it: @@ -92,4 +92,4 @@ in light of an expected log processing approach. One of Pino's performance tricks is to avoid building objects and stringifying them, so we're building strings instead. 
This is why duplicate keys between -parents and children will end up in log output. +parents and children will end up in the log output. diff --git a/docs/ecosystem.md b/docs/ecosystem.md index 160de2955..b1611b145 100644 --- a/docs/ecosystem.md +++ b/docs/ecosystem.md @@ -43,7 +43,7 @@ in a MongoDB database. + [`pino-noir`](https://github.com/pinojs/pino-noir): redact sensitive information in logs. + [`pino-pretty`](https://github.com/pinojs/pino-pretty): basic prettifier to -make log lines human readable. +make log lines human-readable. + [`pino-socket`](https://github.com/pinojs/pino-socket): send logs to TCP or UDP destinations. + [`pino-std-serializers`](https://github.com/pinojs/pino-std-serializers): the @@ -64,7 +64,7 @@ the logger for the [Rill framework](https://rill.site/). + [`pino-colada`](https://github.com/lrlna/pino-colada): cute ndjson formatter for pino. + [`pino-fluentd`](https://github.com/davidedantonio/pino-fluentd): send Pino logs to Elasticsearch, -MongoDB and many [others](https://www.fluentd.org/dataoutputs) via Fluentd. +MongoDB, and many [others](https://www.fluentd.org/dataoutputs) via Fluentd. + [`pino-pretty-min`](https://github.com/unjello/pino-pretty-min): a minimal prettifier inspired by the [logrus](https://github.com/sirupsen/logrus) logger. + [`pino-rotating-file`](https://github.com/homeaway/pino-rotating-file): a hapi-pino log transport for splitting logs into separate, automatically rotating files. @@ -73,4 +73,4 @@ prettifier inspired by the [logrus](https://github.com/sirupsen/logrus) logger. + [`pino-dev`](https://github.com/dnjstrom/pino-dev): simple prettifier for pino with built-in support for common ecosystem packages. 
+ [`@newrelic/pino-enricher`](https://github.com/newrelic/newrelic-node-log-extensions/blob/main/packages/pino-log-enricher): a log customization to add New Relic context to use [Logs In Context](https://docs.newrelic.com/docs/logs/logs-context/logs-in-context/) + [`pino-lambda`](https://github.com/FormidableLabs/pino-lambda): log transport for cloudwatch support inside aws-lambda -+ [`cloud-pine`](https://github.com/metcoder95/cloud-pine): transport that provide abstraction and compatibility with [`@google-cloud/logging`](https://www.npmjs.com/package/@google-cloud/logging). ++ [`cloud-pine`](https://github.com/metcoder95/cloud-pine): transport that provides abstraction and compatibility with [`@google-cloud/logging`](https://www.npmjs.com/package/@google-cloud/logging). diff --git a/docs/help.md b/docs/help.md index 3760eefa1..02723d42a 100644 --- a/docs/help.md +++ b/docs/help.md @@ -47,9 +47,9 @@ help. ## Reopening log files -In cases where a log rotation tool doesn't offer a copy-truncate capabilities, +In cases where a log rotation tool doesn't offer copy-truncate capabilities, or where using them is deemed inappropriate, `pino.destination` -is able to reopen file paths after a file has been moved away. +can reopen file paths after a file has been moved away. One way to use this is to set up a `SIGUSR2` or `SIGHUP` signal handler that reopens the log file destination, making sure to write the process PID out @@ -124,7 +124,7 @@ Pino's default log destination is the singular destination of `stdout`. While not recommended for performance reasons, multiple destinations can be targeted by using [`pino.multistream`](/doc/api.md#pino-multistream). -In this example we use `stderr` for `error` level logs and `stdout` as default +In this example, we use `stderr` for `error` level logs and `stdout` as default for all other levels (e.g. `debug`, `info`, and `warn`). ```js @@ -155,7 +155,7 @@ for information on this is handled. Pino log lines are meant to be parseable. 
Thus, Pino's default mode of operation is to print the level value instead of the string name. However, you can use the [`formatters`](/docs/api.md#formatters-object) option -with a [`level`](/docs/api.md#level) function to print the string name instead the level value : +with a [`level`](/docs/api.md#level) function to print the string name instead of the level value : ```js const pino = require('pino') @@ -175,7 +175,7 @@ log.info('message') // {"level":"info","time":1661632832200,"pid":18188,"hostname":"foo","msg":"message"} ``` -Although it is works, we recommend using one of these options instead if you are able: +Although it works, we recommend using one of these options instead if you are able: 1. If the only change desired is the name then a transport can be used. One such transport is [`pino-text-level-transport`](https://npm.im/pino-text-level-transport). @@ -202,7 +202,7 @@ $ npm i pino-debug $ DEBUG=* node -r pino-debug app.js ``` -[`pino-debug`](https://github.com/pinojs/pino-debug) also offers fine grain control to map specific `debug` +[`pino-debug`](https://github.com/pinojs/pino-debug) also offers fine-grain control to map specific `debug` namespaces to `pino` log levels. See [`pino-debug`](https://github.com/pinojs/pino-debug) for more. @@ -211,8 +211,8 @@ for more. Pino uses [sonic-boom](https://github.com/mcollina/sonic-boom) to speed up logging. Internally, it uses [`fs.write`](https://nodejs.org/dist/latest-v10.x/docs/api/fs.html#fs_fs_write_fd_string_position_encoding_callback) to write log lines directly to a file -descriptor. On Windows, unicode output is not handled properly in the -terminal (both `cmd.exe` and powershell), and as such the output could +descriptor. On Windows, Unicode output is not handled properly in the +terminal (both `cmd.exe` and PowerShell), and as such the output could be visualized incorrectly if the log lines include utf8 characters. 
It is possible to configure the terminal to visualize those characters correctly with the use of [`chcp`](https://ss64.com/nt/chcp.html) by @@ -222,7 +222,7 @@ Node.js. ## Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels -Google Cloud Logging uses `severity` levels instead log levels. As a result, all logs may show as INFO +Google Cloud Logging uses `severity` levels instead of log levels. As a result, all logs may show as INFO level logs while completely ignoring the level set in the pino log. Google Cloud Logging also prefers that log data is present inside a `message` key instead of the default `msg` key that Pino uses. Use a technique similar to the one below to retain log levels in Google Cloud Logging diff --git a/docs/pretty.md b/docs/pretty.md index ca26a8ea4..9afb8647d 100644 --- a/docs/pretty.md +++ b/docs/pretty.md @@ -1,7 +1,7 @@ # Pretty Printing By default, Pino log lines are newline delimited JSON (NDJSON). This is perfect -for production usage and long term storage. It's not so great for development +for production usage and long-term storage. It's not so great for development environments. Thus, Pino logs can be prettified by using a Pino prettifier module like [`pino-pretty`][pp]: @@ -11,15 +11,15 @@ $ cat app.log | pino-pretty For almost all situations, this is the recommended way to prettify logs. The programmatic API, described in the next section, is primarily for integration -purposes with other CLI based prettifiers. +purposes with other CLI-based prettifiers. ## Prettifier API Pino prettifier modules are extra modules that provide a CLI for parsing NDJSON -log lines piped via `stdin` and expose an API which conforms to the Pino +log lines piped via `stdin` and expose an API that conforms to the Pino [metadata streams](/docs/api.md#metadata) API. -The API requires modules provide a factory function which returns a prettifier +The API requires modules provide a factory function that returns a prettifier function. 
This prettifier function must accept either a string of NDJSON or a Pino log object. A pseudo-example of such a prettifier is: @@ -86,7 +86,7 @@ will be written to the destination stream. that can be passed via `prettyPrint`. The default prettifier write stream does not guarantee final log writes. -Correspondingly, a warning is written to logs on first synchronous flushing. +Correspondingly, a warning is written to logs on the first synchronous flushing. This warning may be suppressed by passing `suppressFlushSyncWarning : true` to `prettyPrint`: ```js diff --git a/docs/redaction.md b/docs/redaction.md index 8dce14ac1..66dcae5c0 100644 --- a/docs/redaction.md +++ b/docs/redaction.md @@ -3,8 +3,8 @@ > Redaction is not supported in the browser [#670](https://github.com/pinojs/pino/issues/670) To redact sensitive information, supply paths to keys that hold sensitive data -using the `redact` option. Note that paths which contain hyphens need to use -brackets in order to access the hyphenated property: +using the `redact` option. Note that paths that contain hyphens need to use +brackets to access the hyphenated property: ```js const logger = require('.')({ @@ -120,7 +120,7 @@ Pino's redaction functionality is built on top of [`fast-redact`](https://github which adds about 2% overhead to `JSON.stringify` when using paths without wildcards. When used with pino logger with a single redacted path, any overhead is within noise - -a way to deterministically measure it's effect has not been found. This is because its not a bottleneck. +a way to deterministically measure its effect has not been found. This is because it is not a bottleneck. However, wildcard redaction does carry a non-trivial cost relative to explicitly declaring the keys (50% in a case where four keys are redacted across two objects). 
See @@ -129,7 +129,7 @@ the [`fast-redact` benchmarks](https://github.com/davidmarkclements/fast-redact# ## Safety The `redact` option is intended as an initialization time configuration option. -It's extremely important that path strings do not originate from user input. +Path strings must not originate from user input. The `fast-redact` module uses a VM context to syntax check the paths, user input should never be combined with such an approach. See the [`fast-redact` Caveat](https://github.com/davidmarkclements/fast-redact#caveat) and the [`fast-redact` Approach](https://github.com/davidmarkclements/fast-redact#approach) for in-depth information. diff --git a/docs/transports.md b/docs/transports.md index 075e0ecf1..32518d302 100644 --- a/docs/transports.md +++ b/docs/transports.md @@ -10,17 +10,17 @@ The way Pino generates logs: It is recommended that any log transformation or transmission is performed either in a separate thread or a separate process. -Prior to Pino v7 transports would ideally operate in a separate process - these are +Before Pino v7 transports would ideally operate in a separate process - these are now referred to as [Legacy Transports](#legacy-transports). -From Pino v7 and upwards transports can also operate inside a [Worker Thread][worker-thread], +From Pino v7 and upwards transports can also operate inside a [Worker Thread][worker-thread] and can be used or configured via the options object passed to `pino` on initialization. [worker-thread]: https://nodejs.org/dist/latest-v14.x/docs/api/worker_threads.html ## v7+ Transports -A transport is a module that exports a default function which returns a writable stream: +A transport is a module that exports a default function that returns a writable stream: ```js import { createWriteStream } from 'fs' @@ -62,11 +62,11 @@ export default async (options) => { ``` While initializing the stream we're able to use `await` to perform asynchronous operations. 
In this
-case waiting for the write streams `open` event.
+case, waiting for the write stream's `open` event.

Let's imagine the above was published to npm with the module name `some-file-transport`.

-The `options.destination` value can be set when the creating the transport stream with `pino.transport` like so:
+The `options.destination` value can be set when creating the transport stream with `pino.transport` like so:

```js
const pino = require('pino')
@@ -80,7 +80,7 @@ pino(transport)
Note here we've specified a module by package rather than by relative path. The
options object we provide is serialized and injected into the transport worker
thread, then passed to the module's exported function. This means that the options object can only contain types that are supported by the
-[Structured Clone Algorithm][sca] which is used to (de)serializing objects between threads.
+[Structured Clone Algorithm][sca] which is used to (de)serialize objects between threads.

What if we wanted to use both transports, but send only error logs to `some-file-transport` while
sending all logs to `my-transport.mjs`? We can use the `pino.transport` function's `destinations` option:
@@ -120,7 +120,7 @@ For more details on `pino.transport` see the [API docs for `pino.transport`][pin
The module [pino-abstract-transport](https://github.com/pinojs/pino-abstract-transport) provides
a simple utility to parse each line. Its usage is highly recommended.

-You can see an example using a async iterator with ESM:
+You can see an example using an async iterator with ESM:

```js
import build from 'pino-abstract-transport'
@@ -176,7 +176,7 @@ module.exports = function (opts) {
To consume async iterators in batches, consider using the [hwp](https://github.com/mcollina/hwp) library.

The `close()` function is needed to make sure that the stream is closed and flushed when its
-callback is called or the returned promise resolved. 
+callback is called or the returned promise resolves. Otherwise, log lines will be lost. ### Creating a transport pipeline @@ -240,8 +240,8 @@ a terminating target, i.e. a `Writable` stream.__ Pino provides basic support for transports written in TypeScript. -Ideally, they should be transpiled to ensure maximum compatibility, but some -times you might want to use tools such as TS-Node, to execute your TypeScript +Ideally, they should be transpiled to ensure maximum compatibility, but sometimes +you might want to use tools such as TS-Node, to execute your TypeScript code without having to go through an explicit transpilation step. You can use your TypeScript code without explicit transpilation, but there are @@ -315,7 +315,7 @@ pino(transport) ### Asynchronous startup The new transports boot asynchronously and calling `process.exit()` before the transport -started will cause logs to not be delivered. +starts will cause logs to not be delivered. ```js const pino = require('pino') @@ -338,7 +338,7 @@ transport.on('ready', function () { ## Legacy Transports -A legacy Pino "transport" is a supplementary tool which consumes Pino logs. +A legacy Pino "transport" is a supplementary tool that consumes Pino logs. Consider the following example for creating a transport: @@ -367,11 +367,11 @@ node my-app-which-logs-stuff-to-stdout.js | node my-transport-process.js Ideally, a transport should consume logs in a separate process to the application, Using transports in the same process causes unnecessary load and slows down -Node's single threaded event loop. +Node's single-threaded event loop. ## Known Transports -PR's to this document are welcome for any new transports! +PRs to this document are welcome for any new transports! 
### Pino v7+ Compatible

@@ -460,7 +460,7 @@ $ node app.js | pino-couch -U https://couch-server -d mylogs

### pino-datadog

-The [pino-datadog](https://www.npmjs.com/package/pino-datadog) module is a transport that will forward logs to [DataDog](https://www.datadoghq.com/) through it's API.
+The [pino-datadog](https://www.npmjs.com/package/pino-datadog) module is a transport that will forward logs to [DataDog](https://www.datadoghq.com/) through its API.

Given an application `foo` that logs via pino, you would use `pino-datadog` like so:

@@ -564,7 +564,7 @@ $ node index.js | pino-logflare --key YOUR_KEY --source YOUR_SOURCE
The `pino-mq` transport will take all messages received on `process.stdin` and send them over a message bus using JSON serialization.

-This useful for:
+This is useful for:

* moving backpressure from application to broker
* transforming messages pressure to another component
@@ -573,7 +573,7 @@ This useful for:

node app.js | pino-mq -u "amqp://guest:guest@localhost/" -q "pino-logs"
```

-Alternatively a configuration file can be used:
+Alternatively, a configuration file can be used:

```
node app.js | pino-mq -c pino-mq.json
@@ -589,8 +589,8 @@ For full documentation of command line switches and configuration see [the `pino

### pino-loki

-pino-loki is a transport that will forwards logs into [Grafana Loki](https://grafana.com/oss/loki/)
-Can be used in CLI version in a separate process or in a dedicated worker :
+pino-loki is a transport that will forward logs into [Grafana Loki](https://grafana.com/oss/loki/).
+Can be used in CLI version in a separate process or in a dedicated worker:

CLI :
```console
@@ -607,7 +607,7 @@ const transport = pino.transport({
pino(transport)
```

-For full documentation and configuration, see the [readme](https://github.com/Julien-R44/pino-loki)
+For full documentation and configuration, see the [readme](https://github.com/Julien-R44/pino-loki). 
### pino-papertrail @@ -637,7 +637,7 @@ Full documentation in the [readme](https://github.com/Xstoudi/pino-pg). $ node app.js | pino-mysql -c db-configuration.json ``` -`pino-mysql` can extract and save log fields into corresponding database field +`pino-mysql` can extract and save log fields into corresponding database fields and/or save the entire log stream as a [JSON Data Type][JSONDT]. For full documentation and command line switches read the [readme][pino-mysql]. @@ -668,7 +668,7 @@ $ node app.js | pino-redis -U redis://username:password@localhost:6379 $ node app.js | pino-sentry --dsn=https://******@sentry.io/12345 ``` -For full documentation of command line switches see the [pino-sentry readme](https://github.com/aandrewww/pino-sentry/blob/master/README.md) +For full documentation of command line switches see the [pino-sentry readme](https://github.com/aandrewww/pino-sentry/blob/master/README.md). [pino-sentry]: https://www.npmjs.com/package/pino-sentry [Sentry]: https://sentry.io/ @@ -742,7 +742,7 @@ const transport = pino.transport({ projectId: 1, projectKey: "REPLACE_ME", environment: "production", - // aditional options for airbrake + // additional options for airbrake performanceStats: false, }, }, @@ -757,7 +757,7 @@ pino(transport) ### pino-socket -[pino-socket][pino-socket] is a transport that will forward logs to a IPv4 +[pino-socket][pino-socket] is a transport that will forward logs to an IPv4 UDP or TCP socket. As an example, use `socat` to fake a listener: @@ -841,9 +841,9 @@ https://github.com/deviantony/docker-elk to setup an ELK stack. ### pino-stackdriver -The [pino-stackdriver](https://www.npmjs.com/package/pino-stackdriver) module is a transport that will forward logs to the [Google Stackdriver](https://cloud.google.com/logging/) log service through it's API. 
+The [pino-stackdriver](https://www.npmjs.com/package/pino-stackdriver) module is a transport that will forward logs to the [Google Stackdriver](https://cloud.google.com/logging/) log service through its API. -Given an application `foo` that logs via pino, a stackdriver log project `bar` and credentials in the file `/credentials.json`, you would use `pino-stackdriver` +Given an application `foo` that logs via pino, a stackdriver log project `bar`, and credentials in the file `/credentials.json`, you would use `pino-stackdriver` like so: ``` sh diff --git a/docs/web.md b/docs/web.md index 080350df3..40b7c6cf7 100644 --- a/docs/web.md +++ b/docs/web.md @@ -1,6 +1,6 @@ # Web Frameworks -Since HTTP logging is a primary use case, Pino has first class support for the Node.js +Since HTTP logging is a primary use case, Pino has first-class support for the Node.js web framework ecosystem. - [Web Frameworks](#web-frameworks) @@ -18,7 +18,7 @@ web framework ecosystem. The Fastify web framework comes bundled with Pino by default, simply set Fastify's `logger` option to `true` and use `request.log` or `reply.log` for log messages that correspond -to each individual request: +to each request: ```js const fastify = require('fastify')({ @@ -84,7 +84,7 @@ async function start () { method: 'GET', path: '/', handler: async function (request, h) { - // request.log is HAPI standard way of logging + // request.log is HAPI's standard way of logging request.log(['a', 'b'], 'Request into hello world') // a pino instance can also be used, which will be faster From 7962c76e31ae1003b784daa46723aec056936250 Mon Sep 17 00:00:00 2001 From: Simen Bekkhus Date: Fri, 16 Sep 2022 16:09:56 +0200 Subject: [PATCH 6/7] fix: type `formatters.log` with a Record type (#1561) --- pino.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pino.d.ts b/pino.d.ts index e0f7466e7..bbaac6ee8 100644 --- a/pino.d.ts +++ b/pino.d.ts @@ -586,7 +586,7 @@ declare namespace pino { * All arguments passed 
to the log method, except the message, will be pass to this function. * By default it does not change the shape of the log object. */ - log?: (object: object) => object; + log?: (object: Record) => Record; }; /** From 4f9e5a6ca6b24947534b180234f90c3630a6a960 Mon Sep 17 00:00:00 2001 From: Simen Bekkhus Date: Sat, 17 Sep 2022 16:59:51 +0200 Subject: [PATCH 7/7] fix: correct type of `args` passed to `hooks.logMethod` (#1558) --- pino.d.ts | 2 +- test/types/pino.test-d.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pino.d.ts b/pino.d.ts index bbaac6ee8..5442034e1 100644 --- a/pino.d.ts +++ b/pino.d.ts @@ -600,7 +600,7 @@ declare namespace pino { * log method and method is the log method itself, and level is the log level. This hook must invoke the method function by * using apply, like so: method.apply(this, newArgumentsArray). */ - logMethod?: (this: Logger, args: any[], method: LogFn, level: number) => void; + logMethod?: (this: Logger, args: Parameters, method: LogFn, level: number) => void; }; /** diff --git a/test/types/pino.test-d.ts b/test/types/pino.test-d.ts index e4ef8c885..f103d6587 100644 --- a/test/types/pino.test-d.ts +++ b/test/types/pino.test-d.ts @@ -230,7 +230,7 @@ const withHooks = pino({ hooks: { logMethod(args, method, level) { expectType(this); - return method.apply(this, ['msg', ...args]); + return method.apply(this, args); }, }, });