astro-robots-txt
Comparing version 0.3.3 to 0.3.4
@@ -0,2 +1,5 @@
"use strict";
// src/index.ts
+import fs2 from "fs";
import { ZodError } from "zod";
@@ -106,8 +109,2 @@
-// src/data/pkg-name.ts
-var packageName = "astro-robots-txt";
-// src/on-build-done.ts
-import fs2 from "fs";
// src/validate-options.ts
@@ -118,2 +115,3 @@ import { z as z2 } from "zod";
import { z } from "zod";
+import isValidFilename from "valid-filename";
@@ -128,10 +126,11 @@ // src/config-defaults.ts
}
-]
+],
+sitemapBaseFileName: "sitemap"
};
// src/schema.ts
-var validateSitemapItem = () => z.string().min(1).refine((val) => !val || isValidHttpUrl(val), {
+var schemaSitemapItem = z.string().min(1).refine((val) => !val || isValidHttpUrl(val), {
message: "Only valid URLs with `http` or `https` protocol allowed"
});
-var validateCleanParam = () => z.string().max(500);
+var schemaCleanParam = z.string().max(500);
var RobotsTxtOptionsSchema = z.object({
@@ -141,3 +140,3 @@ host: z.string().optional().refine((val) => !val || isValidHostname(val), {
}),
-sitemap: validateSitemapItem().or(validateSitemapItem().array()).or(z.boolean()).optional().default(ROBOTS_TXT_CONFIG_DEFAULTS.sitemap),
+sitemap: schemaSitemapItem.or(schemaSitemapItem.array()).or(z.boolean()).optional().default(ROBOTS_TXT_CONFIG_DEFAULTS.sitemap),
policy: z.object({
@@ -147,5 +146,7 @@ userAgent: z.string().min(1),
disallow: z.string().or(z.string().array()).optional(),
-cleanParam: validateCleanParam().or(validateCleanParam().array()).optional(),
+cleanParam: schemaCleanParam.or(schemaCleanParam.array()).optional(),
crawlDelay: z.number().nonnegative().optional().refine((val) => typeof val === "undefined" || Number.isFinite(val), { message: "Must be finite number" })
-}).array().nonempty().optional().default(ROBOTS_TXT_CONFIG_DEFAULTS.policy)
+}).array().nonempty().optional().default(ROBOTS_TXT_CONFIG_DEFAULTS.policy),
+sitemapBaseFileName: z.string().min(1).optional().refine((val) => !val || isValidFilename(val), { message: "Not valid file name" }).default(ROBOTS_TXT_CONFIG_DEFAULTS.sitemapBaseFileName),
+transform: z.function().args(z.string()).returns(z.any()).optional()
}).default(ROBOTS_TXT_CONFIG_DEFAULTS);
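The hunk above shows 0.3.4 replacing the `validateSitemapItem()` / `validateCleanParam()` factory functions with plain schema constants, and adding schema entries for the new `sitemapBaseFileName` and `transform` options. A minimal standalone sketch of the sitemap-item validation, with `isValidHttpUrl` re-implemented here as a hypothetical stand-in for the package's internal helper:

```js
import { z } from "zod";

// Hypothetical stand-in for the package's internal isValidHttpUrl helper.
const isValidHttpUrl = (val) => {
  try {
    const { protocol } = new URL(val);
    return protocol === "http:" || protocol === "https:";
  } catch {
    return false;
  }
};

// Mirrors `schemaSitemapItem` from the hunk above.
const schemaSitemapItem = z.string().min(1).refine((val) => !val || isValidHttpUrl(val), {
  message: "Only valid URLs with `http` or `https` protocol allowed"
});

schemaSitemapItem.parse("https://example.com/sitemap.xml"); // passes
schemaSitemapItem.parse("ftp://example.com/sitemap.xml");   // throws ZodError
```

Since Zod schemas are immutable, building the constants once at module load behaves the same as the old per-call factory invocations.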
@@ -198,3 +199,3 @@
};
-var getSitemapArr = (sitemap, filalSiteHref) => {
+var getSitemapArr = (sitemap, finalSiteHref, sitemapBaseFileName) => {
if (typeof sitemap !== "undefined") {
@@ -211,6 +212,7 @@ if (!sitemap) {
}
-return [`${addBackSlash(filalSiteHref)}sitemap.xml`];
+return [`${addBackSlash(finalSiteHref)}${sitemapBaseFileName}.xml`];
};
-var getRobotsTxtContent = (finalSiteHref, { host, sitemap, policy } = {}) => {
+var getRobotsTxtContent = (finalSiteHref, opts) => {
var _a;
+const { host, sitemap, policy, sitemapBaseFileName } = opts;
let result = "";
@@ -220,3 +222,3 @@ policy == null ? void 0 : policy.forEach((item, index) => {
});
-(_a = getSitemapArr(sitemap, finalSiteHref)) == null ? void 0 : _a.forEach((item) => {
+(_a = getSitemapArr(sitemap, finalSiteHref, sitemapBaseFileName)) == null ? void 0 : _a.forEach((item) => {
result += addLine("Sitemap", item);
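With the extra `sitemapBaseFileName` argument, the default `Sitemap:` entry is no longer hardcoded to `sitemap.xml`. A rough re-creation of the helper as it stands after this change, where `addBackSlash` is assumed to append a trailing slash when one is missing:

```js
// Assumed behavior of the internal addBackSlash helper.
const addBackSlash = (href) => (href.endsWith("/") ? href : `${href}/`);

const getSitemapArr = (sitemap, finalSiteHref, sitemapBaseFileName) => {
  if (typeof sitemap !== "undefined") {
    if (!sitemap) return undefined;              // sitemap: false -> no Sitemap line
    if (typeof sitemap === "string") return [sitemap];
    if (Array.isArray(sitemap)) return sitemap;  // explicit URLs take precedence
  }
  // sitemap: true (or undefined) -> build the default URL from the site href
  return [`${addBackSlash(finalSiteHref)}${sitemapBaseFileName}.xml`];
};

console.log(getSitemapArr(true, "https://example.com", "sitemap-index"));
// -> ["https://example.com/sitemap-index.xml"]
```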
@@ -230,11 +232,4 @@ });
-// src/on-build-done.ts
-var onBuildDone = (pluginOptions, config, dir) => {
-const opts = validateOptions(config.site, pluginOptions);
-const finalSiteHref = new URL(config.base, config.site).href;
-const robotsTxtContent = getRobotsTxtContent(finalSiteHref, opts);
-const url = new URL("robots.txt", dir);
-fs2.writeFileSync(url, robotsTxtContent);
-};
-var on_build_done_default = onBuildDone;
// src/data/pkg-name.ts
var packageName = "astro-robots-txt";
@@ -260,3 +255,19 @@ // src/index.ts
try {
-on_build_done_default(merged, config, dir);
+const opts = validateOptions(config.site, merged);
+const finalSiteHref = new URL(config.base, config.site).href;
+let robotsTxtContent = getRobotsTxtContent(finalSiteHref, opts);
+if (opts.transform) {
+try {
+robotsTxtContent = await Promise.resolve(opts.transform(robotsTxtContent));
+if (!robotsTxtContent) {
+logger.warn("No content after transform.");
+return;
+}
+} catch (err) {
+logger.error(`Error transforming content
+${err.toString()}`);
+return;
+}
+}
+fs2.writeFileSync(new URL("robots.txt", dir), robotsTxtContent);
logger.success("`robots.txt` is created.");
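Since the build step now awaits `Promise.resolve(opts.transform(...))`, the `transform` option accepts both sync and async callbacks. A hedged sketch of an async transform in a user config; the banner text is illustrative:

```js
// astro.config.mjs — illustrative usage of the async-capable transform option
import robotsTxt from "astro-robots-txt";

export default {
  site: "https://example.com",
  integrations: [
    robotsTxt({
      transform: async (content) => {
        const banner = "# generated at build time\n";
        // Returning a falsy value makes the integration log a warning and skip the write.
        return banner + content;
      }
    })
  ]
};
```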
@@ -263,0 +274,0 @@ } catch (err) {
{
"name": "astro-robots-txt",
-"version": "0.3.3",
+"version": "0.3.4",
"description": "Generate a robots.txt for Astro",
@@ -36,10 +36,11 @@ "keywords": [
"deepmerge": "^4.2.2",
+"valid-filename": "^4.0.0",
"zod": "^3.17.3"
},
"devDependencies": {
-"@types/node": "^17.0.43",
-"astro": "^1.0.0-beta.46",
+"@types/node": "^18.0.0",
+"astro": "^1.0.0-beta.47",
"at-scripts": "0.0.4",
"c8": "^7.11.3",
-"typescript": "^4.7.3",
+"typescript": "^4.7.4",
"vite": "^2.9.12",
@@ -46,0 +47,0 @@ "vitest": "^0.15.1"
@@ -109,7 +109,9 @@ [](https://bank.gov.ua/en/about/support-the-armed-forces)
-| Name | Type | Default | Description |
-| :-------: | :-----------------------------: | :------------------------------: | :---------- |
-| `host` | `String` | `` | Host of your site |
-| `sitemap` | `Boolean` / `String` / `String[]` | `true` | Resulting output in a _robots.txt_ will be `Sitemap: your-site-url/sitemap.xml`.<br/>If `sitemap: false` - no `Sitemap` line in the output.<br/>When the `sitemap` is `String` or `String[]` its values have to be a valid **http** URL. Empty strings or other protocols are not allowed. |
-| `policy` | `Policy[]` | `[{ allow: '/', userAgent: '*' }]` | List of `Policy` rules |
+| Name | Type | Required | Default | Description |
+| :-------: | :-----------------------------: | :------: | :------: | :---------- |
+| `host` | `String` | No | | Host of your site |
+| `sitemap` | `Boolean` / `String` / `String[]` | No | `true` | Resulting output in a _robots.txt_ will be `Sitemap: your-site-url/sitemap.xml`.<br/>If `sitemap: false` - no `Sitemap` line in the output.<br/>When the `sitemap` is `String` or `String[]` its values have to be a valid **http** URL. Empty strings or other protocols are not allowed. |
+| `policy` | `Policy[]` | No | `[{ allow: '/', userAgent: '*' }]` | List of `Policy` rules |
+| `sitemapBaseFileName` | `String` | No | `sitemap` | The name of the sitemap file before the `.xml` extension. Used when the `sitemap` option is `true`. |
+| `transform` | `(content: String): String \| Promise<String>` | No | | Called just before writing the text output to disk. Sync or async. |
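A sketch of the new `sitemapBaseFileName` option in use; the site URL and file name are placeholders:

```js
// astro.config.mjs — illustrative values
import { defineConfig } from "astro/config";
import robotsTxt from "astro-robots-txt";

export default defineConfig({
  site: "https://example.com",
  integrations: [
    robotsTxt({
      // Emits `Sitemap: https://example.com/sitemap-index.xml`
      sitemapBaseFileName: "sitemap-index"
    })
  ]
});
```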
@@ -119,8 +121,8 @@ ### Policy
| Name | Type | Required | Description |
-| :----------: | :-------------------: | :------: | :---------: |
+| :----------: | :-------------------: | :------: | :---------- |
| `userAgent` | `String` | Yes | You must provide the name of a user agent or a wildcard |
| `disallow` | `String` / `String[]` | No | Disallowed paths to index |
| `allow` | `String` / `String[]` | No | Allowed paths to index |
| `crawlDelay` | `Number` | No | Minimum interval (in secs) for the search robot to wait after loading one page, before starting another |
| `cleanParam` | `String` / `String[]` | No | Indicates that the page URL contains parameters that should be ignored when indexing |
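For reference, a sketch of how one `Policy` entry maps onto the generated file; the values are illustrative:

```js
import robotsTxt from "astro-robots-txt";

robotsTxt({
  policy: [
    {
      userAgent: "*",
      allow: "/",
      disallow: ["/admin", "/search"],
      crawlDelay: 2
    }
  ]
});

/* Roughly produces:
User-agent: *
Allow: /
Disallow: /admin
Disallow: /search
Crawl-delay: 2
*/
```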
@@ -164,2 +166,5 @@ **Sample of _astro.config.mjs_**
],
+transform(content) {
+return `# some comments before main content\n#second line\n${content}`;
+},
}),
@@ -166,0 +171,0 @@ ],
+ Added valid-filename@^4.0.0
+ Added filename-reserved-regex@3.0.0 (transitive)
+ Added valid-filename@4.0.0 (transitive)