First commit

This commit is contained in:
Lethary
2026-03-17 09:09:07 +01:00
commit 6d3d08aa8a
7635 changed files with 1720218 additions and 0 deletions
+17
View File
@@ -0,0 +1,17 @@
# editorconfig.org
root = true
[*]
charset = utf-8
end_of_line = lf
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[{compose.yaml,compose.*.yaml}]
indent_size = 2
[*.md]
trim_trailing_whitespace = false
+5
View File
@@ -0,0 +1,5 @@
APP_ENV=dev
DEFAULT_URI=http://localhost:8000
DATABASE_URL=postgresql://app:app@db:5432/app?serverVersion=16&charset=utf8
CORS_ALLOW_ORIGIN='^https?://(localhost|127\.0\.0\.1)(:[0-9]+)?$'
NUXT_PORT=3001
+6
View File
@@ -0,0 +1,6 @@
APP_ENV=dev
APP_SECRET=change-me
DEFAULT_URI=http://localhost:8000
DATABASE_URL=postgresql://app:app@db:5432/app?serverVersion=16&charset=utf8
CORS_ALLOW_ORIGIN='^https?://(localhost|127\.0\.0\.1)(:[0-9]+)?$'
NUXT_PORT=3001
+1
View File
@@ -0,0 +1 @@
APP_SECRET=change-me-local
+1
View File
@@ -0,0 +1 @@
+63
View File
@@ -0,0 +1,63 @@
# Release workflow: recompute the semantic version from the commit history,
# commit the synced VERSION/.version-ref files when they changed, then create
# and push the matching git tag via bin/release-tag.
name: Release

on:
  push:
    branches:
      - main
      - master
      - setup-template
  workflow_dispatch:

permissions:
  contents: write

# One release run per ref at a time; never cancel an in-flight release.
concurrency:
  group: release-${{ github.ref }}
  cancel-in-progress: false

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Full history is required: bin/version walks the commit range
          # from the .version-ref anchor to HEAD.
          fetch-depth: 0

      - name: Configure git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"

      - name: Ensure scripts are executable
        run: chmod +x bin/version bin/release-tag

      - name: Sync VERSION
        id: sync_version
        run: |
          VERSION="$(./bin/version)"
          printf '%s\n' "$VERSION" > VERSION
          git rev-parse HEAD > .version-ref
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          # Use `git status --porcelain` rather than `git diff --quiet`:
          # .version-ref may be untracked on the first run, and `git diff`
          # ignores untracked files, which would skip the commit forever.
          if [ -z "$(git status --porcelain -- VERSION .version-ref)" ]; then
            echo "changed=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          git add VERSION .version-ref
          git commit -m "chore: sync version $VERSION"
          git push
          echo "changed=true" >> "$GITHUB_OUTPUT"

      - name: Create tag
        run: |
          VERSION="${{ steps.sync_version.outputs.version }}"
          # Check the tag namespace explicitly; a bare `git rev-parse` would
          # also match a branch or any other ref named like the version.
          if git rev-parse -q --verify "refs/tags/$VERSION" >/dev/null; then
            echo "Tag $VERSION already exists"
            exit 0
          fi
          ./bin/release-tag
          git push origin "$VERSION"
+11
View File
@@ -0,0 +1,11 @@
###> symfony/framework-bundle ###
/.env.local
/.env.local.php
/.env.*.local
/config/secrets/prod/prod.decrypt.private.php
/public/bundles/
/var/
/vendor/
###< symfony/framework-bundle ###
.idea/
+1
View File
@@ -0,0 +1 @@
cedce158f07e0bfe8f9a604211693820e92efaf4
+121
View File
@@ -0,0 +1,121 @@
# Task runner for a Docker-based Symfony (php) + Nuxt (nuxt) stack with a
# Postgres database (db). Run `make help` for the command list.
# NOTE: recipe lines below are indented with hard tabs, as Make requires.
ENV_DEFAULT = .env
ENV_EXAMPLE = .env.example
ENV_LOCAL = .env.local
# Prefer the local override when it exists, otherwise fall back to .env.
ENV_FILE := $(if $(wildcard $(ENV_LOCAL)),$(ENV_LOCAL),$(ENV_DEFAULT))

# Load both files into Make's variable namespace; `-include` silently skips
# missing files, and .env.local (included last) wins on duplicate keys.
-include $(ENV_DEFAULT)
-include $(ENV_LOCAL)

DOCKER_COMPOSE = docker compose --env-file $(ENV_FILE)
PHP_SERVICE = php
NUXT_SERVICE = nuxt
DB_SERVICE = db

# -T disables pseudo-TTY allocation: needed for scripted (non-interactive)
# execs such as CI runs or piped commands.
EXEC_PHP = $(DOCKER_COMPOSE) exec -T $(PHP_SERVICE)
EXEC_PHP_INTERACTIVE = $(DOCKER_COMPOSE) exec $(PHP_SERVICE)
EXEC_NUXT = $(DOCKER_COMPOSE) exec -T $(NUXT_SERVICE)
EXEC_NUXT_INTERACTIVE = $(DOCKER_COMPOSE) exec $(NUXT_SERVICE)
SYMFONY_CONSOLE = $(EXEC_PHP) php bin/console

# Optional PHPUnit filter: `make test FILES=tests/SomeTest.php`.
FILES =

.DEFAULT_GOAL := help
.PHONY: help first env-init start stop down dev logs install composer-install npm-install migration-migrate db-create db-reset cache-clear shell shell-front test version version-sync git-hooks-install release-tag

help:
	@echo "Commandes disponibles:"
	@echo "  make first               Installation complète du projet"
	@echo "  make start               Lance les containers"
	@echo "  make stop                Stoppe les containers"
	@echo "  make dev                 Lance Nuxt en mode dev"
	@echo "  make install             Installe les dépendances"
	@echo "  make migration-migrate   Execute les migrations"
	@echo "  make db-reset            Réinitialise la base"
	@echo "  make shell               Shell dans PHP"
	@echo "  make shell-front         Shell dans Nuxt"
	@echo "  make logs                Affiche les logs"
	@echo "  make version             Calcule la version depuis les commits"
	@echo "  make version-sync        Met a jour le fichier VERSION"
	@echo "  make git-hooks-install   Installe le hook commit-msg"
	@echo "  make release-tag         Cree un tag git de release"

# One-shot project bootstrap: env file, containers, dependencies.
first:
	@echo "**** COPIE DU .ENV "
	$(MAKE) env-init
	@echo " START CONTAINERS "
	$(MAKE) start
	@echo " INSTALL DEPENDANCES "
	$(MAKE) install
	@echo " ALL DONE ****"

# Copy the example env file; `cp -n` never overwrites an existing .env.
env-init:
	@cp -n $(ENV_EXAMPLE) $(ENV_DEFAULT) 2>/dev/null || true

# Probe upwards from 8000 for a free port before starting the stack.
start: version-sync
	@PORT=8000; \
	while ss -tuln | grep -q ":$$PORT "; do \
		PORT=$$((PORT+1)); \
	done; \
	echo "Port utilisé: $$PORT"; \
	APP_PORT=$$PORT $(DOCKER_COMPOSE) up -d --remove-orphans

stop:
	$(DOCKER_COMPOSE) stop

down:
	$(DOCKER_COMPOSE) down

dev: version-sync
	$(EXEC_NUXT_INTERACTIVE) npm run dev

logs:
	$(DOCKER_COMPOSE) logs -f

install: composer-install npm-install cache-clear

composer-install:
	$(EXEC_PHP) composer install

npm-install:
	$(EXEC_NUXT) npm install

migration-migrate:
	$(SYMFONY_CONSOLE) doctrine:migrations:migrate --no-interaction

db-create:
	$(SYMFONY_CONSOLE) doctrine:database:create --if-not-exists

# Destroys the database volume (-v) and rebuilds the schema from migrations.
db-reset:
	$(DOCKER_COMPOSE) down -v
	$(DOCKER_COMPOSE) up -d
	$(MAKE) db-create
	$(MAKE) migration-migrate

cache-clear:
	$(SYMFONY_CONSOLE) cache:clear

shell:
	$(EXEC_PHP_INTERACTIVE) sh

shell-front:
	$(EXEC_NUXT_INTERACTIVE) sh

test:
	$(EXEC_PHP) php vendor/bin/phpunit $(FILES)

version:
	@./bin/version

# Write the computed version to VERSION and anchor HEAD in .version-ref.
version-sync:
	@VERSION_VALUE="$$(./bin/version)" && printf '%s\n' "$$VERSION_VALUE" > VERSION
	@git rev-parse HEAD > .version-ref
	@echo "VERSION=$$(cat VERSION)"

git-hooks-install:
	@install -m 755 commit-msg .git/hooks/commit-msg
	@echo "Hook commit-msg installé"

release-tag:
	@./bin/release-tag
+92
View File
@@ -0,0 +1,92 @@
# Template
Projet Symfony + Nuxt avec Docker et `Makefile`.
## Installation
Créer la configuration locale :
```bash
cp .env.local.example .env.local
```
Mettre au minimum une valeur pour `APP_SECRET` dans `.env.local`.
Puis lancer l'initialisation du projet :
```bash
make first
```
## Workflow
Démarrer les containers :
```bash
make start
```
Lancer le frontend en développement :
```bash
make dev
```
Stopper les containers :
```bash
make stop
```
## Versioning
Le projet calcule automatiquement sa version à partir de la valeur présente dans `VERSION` et des messages de commit ajoutés depuis le dernier point de synchronisation (`.version-ref`).
- `fix:` incrémente le patch: `+0.0.1`
- `feat:` incrémente le minor: `+0.1.0`
- `feat!:` (ou tout autre type suivi de `!`, ou un corps contenant `BREAKING CHANGE:`) incrémente le major: `+1.0.0`
Commandes utiles :
```bash
make version
make version-sync
make git-hooks-install
make release-tag
```
`make start` et `make dev` synchronisent aussi automatiquement le fichier `VERSION` avant exécution.
Si tu modifies `VERSION` à la main, cette valeur devient la nouvelle base pour les prochains bumps. Par exemple, remettre `v0.0.0` fait repartir le versioning depuis `v0.0.0`.
La release crée un tag git annoté `vX.Y.Z` sur `HEAD`. La commande refuse de s'exécuter tant que l'arbre git n'est pas propre.
Le dépôt GitHub contient aussi une action automatique dans `.github/workflows/release.yml`. À chaque `push` sur `main`, `master` ou `setup-template`, elle :
- recalcule la version
- synchronise le fichier `VERSION`
- mémorise le dernier commit traité dans `.version-ref`
- commit `VERSION` si besoin
- crée et pousse le tag git correspondant
Le hook `commit-msg` accepte aussi les breaking changes avec `!`, par exemple :
```bash
feat!: replace auth flow
fix(api): prevent null response crash
```
## Variables
Base partagée :
- `.env`
- `.env.example`
Overrides locaux :
- `.env.local`
Exemple local :
- `.env.local.example`
+1
View File
@@ -0,0 +1 @@
v0.0.0
Executable
+21
View File
@@ -0,0 +1,21 @@
#!/usr/bin/env php
<?php
// Symfony console entry point: boots the kernel and hands control to the
// Console Application (standard bin/console from the Symfony skeleton).
use App\Kernel;
use Symfony\Bundle\FrameworkBundle\Console\Application;
// Fail fast with an actionable message when dependencies are not installed.
if (!is_dir(dirname(__DIR__).'/vendor')) {
throw new LogicException('Dependencies are missing. Try running "composer install".');
}
if (!is_file(dirname(__DIR__).'/vendor/autoload_runtime.php')) {
throw new LogicException('Symfony Runtime is missing. Try running "composer require symfony/runtime".');
}
require_once dirname(__DIR__).'/vendor/autoload_runtime.php';
// The Symfony Runtime component invokes this closure with the environment
// context (APP_ENV / APP_DEBUG) and runs the returned Application.
return function (array $context) {
$kernel = new Kernel($context['APP_ENV'], (bool) $context['APP_DEBUG']);
return new Application($kernel);
};
+22
View File
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# bin/release-tag — create an annotated release tag from the version computed
# by bin/version. Refuses to run on a dirty working tree or when the tag
# already exists.
set -euo pipefail

# Always operate from the repository root, wherever the script is called from.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"

version="$(./bin/version)"

# A dirty tree means VERSION/.version-ref (or anything else) is not committed:
# tagging now would not match the tagged content.
if [[ -n "$(git status --short)" ]]; then
  echo "❌ Release impossible: l'arbre git contient des modifications non commit."
  echo "➡️ Lance d'abord make version-sync, commit les changements, puis relance la release."
  exit 1
fi

# Check the tag namespace explicitly: a bare `git rev-parse "$version"` would
# also resolve a branch or any other ref carrying that name.
if git rev-parse -q --verify "refs/tags/$version" >/dev/null; then
  echo "❌ Le tag $version existe deja."
  exit 1
fi

git tag -a "$version" -m "Release $version"
echo "Tag cree: $version"
Executable
+64
View File
@@ -0,0 +1,64 @@
#!/usr/bin/env bash
# bin/version — compute the project's semantic version.
# Starts from the value stored in VERSION (default v0.0.0), replays every
# commit since the anchor recorded in .version-ref, and bumps major/minor/
# patch per Conventional Commits. Prints the resulting vX.Y.Z on stdout.
set -euo pipefail
# Run from the repository root regardless of the caller's cwd.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"
VERSION_FILE="VERSION"
ANCHOR_FILE=".version-ref"
# Validate "vX.Y.Z" and split it into the major/minor/patch globals.
parse_version() {
local version="$1"
if [[ ! "$version" =~ ^v([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
echo "❌ VERSION invalide: $version" >&2
echo "➡️ Format attendu: v0.0.0" >&2
exit 1
fi
major="${BASH_REMATCH[1]}"
minor="${BASH_REMATCH[2]}"
patch="${BASH_REMATCH[3]}"
}
# Base version: the VERSION file when present (CR/LF stripped), else v0.0.0.
current_version="v0.0.0"
if [[ -f "$VERSION_FILE" ]]; then
current_version="$(tr -d '\r\n' < "$VERSION_FILE")"
fi
parse_version "$current_version"
# Commit range to replay: everything reachable from HEAD by default, or only
# commits after the anchor when .version-ref holds a resolvable ref.
commit_range="HEAD"
if [[ -f "$ANCHOR_FILE" ]]; then
anchor_ref="$(tr -d '\r\n' < "$ANCHOR_FILE")"
if [[ -n "$anchor_ref" ]] && git rev-parse --verify "$anchor_ref" >/dev/null 2>&1; then
commit_range="${anchor_ref}..HEAD"
fi
fi
# Replay commits oldest-first; per commit, precedence is breaking > feat > fix.
while IFS= read -r commit_hash; do
[[ -z "$commit_hash" ]] && continue
subject="$(git log -1 --format=%s "$commit_hash")"
body="$(git log -1 --format=%b "$commit_hash")"
# Breaking change: any type with "!" in the subject, or a BREAKING CHANGE
# footer in the body — bump major, reset minor and patch.
if [[ "$subject" =~ ^[a-z]+(\([a-z0-9._/-]+\))?!: ]] || [[ "$body" == *"BREAKING CHANGE:"* ]]; then
major=$((major + 1))
minor=0
patch=0
continue
fi
# feat: bump minor, reset patch.
if [[ "$subject" =~ ^feat(\([a-z0-9._/-]+\))?: ]]; then
minor=$((minor + 1))
patch=0
continue
fi
# fix: bump patch. Other types (chore, docs, …) don't change the version.
if [[ "$subject" =~ ^fix(\([a-z0-9._/-]+\))?: ]]; then
patch=$((patch + 1))
fi
done < <(git rev-list --reverse "$commit_range")
printf 'v%s.%s.%s\n' "$major" "$minor" "$patch"
+41
View File
@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# commit-msg hook: enforce Conventional Commits on the message's first line
# and require the "!" breaking-change marker in the title whenever the body
# declares a BREAKING CHANGE. Exits non-zero to abort the commit.
set -euo pipefail
# $1 is the path to the commit message file, provided by git.
MSG_FILE="${1}"
FIRST_LINE="$(head -n 1 "$MSG_FILE" | tr -d '\r')"
BODY_CONTENT="$(tail -n +2 "$MSG_FILE" | tr -d '\r')"
# Allow commits auto-generated by git (merge commits).
if [[ "$FIRST_LINE" =~ ^Merge\ ]]; then
exit 0
fi
# Allowed types (LOWERCASE only)
# Optional scope => feat(auth): ...
# Optional breaking-change marker => feat!: ... / feat(auth)!: ...
REGEX='^(build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)(\([a-z0-9._/-]+\))?(!)?: .+'
if [[ ! "$FIRST_LINE" =~ $REGEX ]]; then
echo "❌ Message de commit invalide."
echo ""
echo "➡️ Format attendu : <type>(<scope optionnel>)!?: <message>"
echo "➡️ Types autorisés (minuscules uniquement) :"
echo " build, chore, ci, docs, feat, fix, perf, refactor, revert, style, test"
echo ""
echo "✅ Exemples :"
echo " feat: add login page"
echo " fix(auth): prevent null token crash"
echo " feat!: replace legacy auth flow"
echo " docs: update README"
echo ""
echo "❌ Exemple refusé :"
echo " Feat: add login page"
exit 1
fi
# A body containing "BREAKING CHANGE:" must also carry the "!" marker in the
# title, so tools reading only the subject line see the major bump.
if [[ "$BODY_CONTENT" == *"BREAKING CHANGE:"* ]] && [[ ! "$FIRST_LINE" =~ !: ]]; then
echo "❌ Breaking change détecté sans marqueur ! dans le titre."
echo ""
echo "➡️ Utilise par exemple : feat!: replace legacy auth flow"
exit 1
fi
+86
View File
@@ -0,0 +1,86 @@
{
"type": "project",
"license": "proprietary",
"minimum-stability": "stable",
"prefer-stable": true,
"require": {
"php": ">=8.2",
"ext-ctype": "*",
"ext-iconv": "*",
"api-platform/doctrine-orm": "^4.2",
"api-platform/symfony": "^4.2",
"doctrine/doctrine-bundle": "^2.18",
"doctrine/doctrine-migrations-bundle": "^3.7",
"doctrine/orm": "^3.6",
"nelmio/cors-bundle": "^2.6",
"phpdocumentor/reflection-docblock": "^6.0",
"phpstan/phpdoc-parser": "^2.3",
"symfony/asset": "7.4.*",
"symfony/console": "7.4.*",
"symfony/dotenv": "7.4.*",
"symfony/expression-language": "7.4.*",
"symfony/flex": "^2",
"symfony/framework-bundle": "7.4.*",
"symfony/property-access": "7.4.*",
"symfony/property-info": "7.4.*",
"symfony/runtime": "7.4.*",
"symfony/security-bundle": "7.4.*",
"symfony/serializer": "7.4.*",
"symfony/twig-bundle": "7.4.*",
"symfony/validator": "7.4.*",
"symfony/yaml": "7.4.*"
},
"config": {
"allow-plugins": {
"php-http/discovery": true,
"symfony/flex": true,
"symfony/runtime": true
},
"bump-after-update": true,
"sort-packages": true
},
"autoload": {
"psr-4": {
"App\\": "src/"
}
},
"autoload-dev": {
"psr-4": {
"App\\Tests\\": "tests/"
}
},
"replace": {
"symfony/polyfill-ctype": "*",
"symfony/polyfill-iconv": "*",
"symfony/polyfill-php72": "*",
"symfony/polyfill-php73": "*",
"symfony/polyfill-php74": "*",
"symfony/polyfill-php80": "*",
"symfony/polyfill-php81": "*",
"symfony/polyfill-php82": "*"
},
"scripts": {
"auto-scripts": {
"cache:clear": "symfony-cmd",
"assets:install %PUBLIC_DIR%": "symfony-cmd"
},
"post-install-cmd": [
"@auto-scripts"
],
"post-update-cmd": [
"@auto-scripts"
]
},
"conflict": {
"symfony/symfony": "*"
},
"extra": {
"symfony": {
"allow-contrib": false,
"require": "7.4.*"
}
},
"require-dev": {
"symfony/maker-bundle": "^1.66"
}
}
Generated
+7711
View File
File diff suppressed because it is too large Load Diff
+12
View File
@@ -0,0 +1,12 @@
<?php
return [
Symfony\Bundle\FrameworkBundle\FrameworkBundle::class => ['all' => true],
Symfony\Bundle\TwigBundle\TwigBundle::class => ['all' => true],
Symfony\Bundle\SecurityBundle\SecurityBundle::class => ['all' => true],
Doctrine\Bundle\DoctrineBundle\DoctrineBundle::class => ['all' => true],
Doctrine\Bundle\MigrationsBundle\DoctrineMigrationsBundle::class => ['all' => true],
Nelmio\CorsBundle\NelmioCorsBundle::class => ['all' => true],
ApiPlatform\Symfony\Bundle\ApiPlatformBundle::class => ['all' => true],
Symfony\Bundle\MakerBundle\MakerBundle::class => ['dev' => true],
];
+7
View File
@@ -0,0 +1,7 @@
api_platform:
title: Hello API Platform
version: 1.0.0
defaults:
stateless: true
cache_headers:
vary: ['Content-Type', 'Authorization', 'Origin']
+19
View File
@@ -0,0 +1,19 @@
framework:
cache:
# Unique name of your app: used to compute stable namespaces for cache keys.
#prefix_seed: your_vendor_name/app_name
# The "app" cache stores to the filesystem by default.
# The data in this cache should persist between deploys.
# Other options include:
# Redis
#app: cache.adapter.redis
#default_redis_provider: redis://localhost
# APCu (not recommended with heavy random-write workloads as memory fragmentation can cause perf issues)
#app: cache.adapter.apcu
# Namespaced pools use the above "app" backend by default
#pools:
#my.dedicated.cache: null
+54
View File
@@ -0,0 +1,54 @@
doctrine:
dbal:
url: '%env(resolve:DATABASE_URL)%'
# IMPORTANT: You MUST configure your server version,
# either here or in the DATABASE_URL env var (see .env file)
#server_version: '16'
profiling_collect_backtrace: '%kernel.debug%'
use_savepoints: true
orm:
auto_generate_proxy_classes: true
enable_lazy_ghost_objects: true
report_fields_where_declared: true
validate_xml_mapping: true
naming_strategy: doctrine.orm.naming_strategy.underscore_number_aware
identity_generation_preferences:
Doctrine\DBAL\Platforms\PostgreSQLPlatform: identity
auto_mapping: true
mappings:
App:
type: attribute
is_bundle: false
dir: '%kernel.project_dir%/src/Entity'
prefix: 'App\Entity'
alias: App
controller_resolver:
auto_mapping: false
when@test:
doctrine:
dbal:
# "TEST_TOKEN" is typically set by ParaTest
dbname_suffix: '_test%env(default::TEST_TOKEN)%'
when@prod:
doctrine:
orm:
auto_generate_proxy_classes: false
proxy_dir: '%kernel.build_dir%/doctrine/orm/Proxies'
query_cache_driver:
type: pool
pool: doctrine.system_cache_pool
result_cache_driver:
type: pool
pool: doctrine.result_cache_pool
framework:
cache:
pools:
doctrine.result_cache_pool:
adapter: cache.app
doctrine.system_cache_pool:
adapter: cache.system
+6
View File
@@ -0,0 +1,6 @@
doctrine_migrations:
migrations_paths:
# namespace is arbitrary but should be different from App\Migrations
# as migrations classes should NOT be autoloaded
'DoctrineMigrations': '%kernel.project_dir%/migrations'
enable_profiler: false
+15
View File
@@ -0,0 +1,15 @@
# see https://symfony.com/doc/current/reference/configuration/framework.html
framework:
secret: '%env(APP_SECRET)%'
# Note that the session will be started ONLY if you read or write from it.
session: true
#esi: true
#fragments: true
when@test:
framework:
test: true
session:
storage_factory_id: session.storage.factory.mock_file
+10
View File
@@ -0,0 +1,10 @@
nelmio_cors:
defaults:
origin_regex: true
allow_origin: ['%env(CORS_ALLOW_ORIGIN)%']
allow_methods: ['GET', 'OPTIONS', 'POST', 'PUT', 'PATCH', 'DELETE']
allow_headers: ['Content-Type', 'Authorization']
expose_headers: ['Link']
max_age: 3600
paths:
'^/': null
+3
View File
@@ -0,0 +1,3 @@
framework:
property_info:
with_constructor_extractor: true
+10
View File
@@ -0,0 +1,10 @@
framework:
router:
# Configure how to generate URLs in non-HTTP contexts, such as CLI commands.
# See https://symfony.com/doc/current/routing.html#generating-urls-in-commands
default_uri: '%env(DEFAULT_URI)%'
when@prod:
framework:
router:
strict_requirements: null
+39
View File
@@ -0,0 +1,39 @@
security:
# https://symfony.com/doc/current/security.html#registering-the-user-hashing-passwords
password_hashers:
Symfony\Component\Security\Core\User\PasswordAuthenticatedUserInterface: 'auto'
# https://symfony.com/doc/current/security.html#loading-the-user-the-user-provider
providers:
users_in_memory: { memory: null }
firewalls:
dev:
# Ensure dev tools and static assets are always allowed
pattern: ^/(_profiler|_wdt|assets|build)/
security: false
main:
lazy: true
provider: users_in_memory
# Activate different ways to authenticate:
# https://symfony.com/doc/current/security.html#the-firewall
# https://symfony.com/doc/current/security/impersonating_user.html
# switch_user: true
# Note: Only the *first* matching rule is applied
access_control:
# - { path: ^/admin, roles: ROLE_ADMIN }
# - { path: ^/profile, roles: ROLE_USER }
when@test:
security:
password_hashers:
# Password hashers are resource-intensive by design to ensure security.
# In tests, it's safe to reduce their cost to improve performance.
Symfony\Component\Security\Core\User\PasswordAuthenticatedUserInterface:
algorithm: auto
cost: 4 # Lowest possible value for bcrypt
time_cost: 3 # Lowest possible value for argon
memory_cost: 10 # Lowest possible value for argon
+6
View File
@@ -0,0 +1,6 @@
twig:
file_name_pattern: '*.twig'
when@test:
twig:
strict_variables: true
+11
View File
@@ -0,0 +1,11 @@
framework:
validation:
# Enables validator auto-mapping support.
# For instance, basic validation constraints will be inferred from Doctrine's metadata.
#auto_mapping:
# App\Entity\: []
when@test:
framework:
validation:
not_compromised_password: false
+5
View File
@@ -0,0 +1,5 @@
<?php
if (file_exists(dirname(__DIR__).'/var/cache/prod/App_KernelProdContainer.preload.php')) {
require dirname(__DIR__).'/var/cache/prod/App_KernelProdContainer.preload.php';
}
+1727
View File
File diff suppressed because it is too large Load Diff
+11
View File
@@ -0,0 +1,11 @@
# yaml-language-server: $schema=../vendor/symfony/routing/Loader/schema/routing.schema.json
# This file is the entry point to configure the routes of your app.
# Methods with the #[Route] attribute are automatically imported.
# See also https://symfony.com/doc/current/routing.html
# To list all registered routes, run the following command:
# bin/console debug:router
controllers:
resource: routing.controllers
+4
View File
@@ -0,0 +1,4 @@
api_platform:
resource: .
type: api_platform
prefix: /api
+4
View File
@@ -0,0 +1,4 @@
when@dev:
_errors:
resource: '@FrameworkBundle/Resources/config/routing/errors.php'
prefix: /_error
+3
View File
@@ -0,0 +1,3 @@
_security_logout:
resource: security.route_loader.logout
type: service
+23
View File
@@ -0,0 +1,23 @@
# yaml-language-server: $schema=../vendor/symfony/dependency-injection/Loader/schema/services.schema.json
# This file is the entry point to configure your own services.
# Files in the packages/ subdirectory configure your dependencies.
# See also https://symfony.com/doc/current/service_container/import.html
# Put parameters here that don't need to change on each machine where the app is deployed
# https://symfony.com/doc/current/best_practices.html#use-parameters-for-application-configuration
parameters:
services:
# default configuration for services in *this* file
_defaults:
autowire: true # Automatically injects dependencies in your services.
autoconfigure: true # Automatically registers your services as commands, event subscribers, etc.
# makes classes in src/ available to be used as services
# this creates a service per class whose id is the fully-qualified class name
App\:
resource: '../src/'
# add more service definitions when explicit configuration is needed
# please note that last definitions always *replace* previous ones
+44
View File
@@ -0,0 +1,44 @@
services:
php:
build:
context: .
dockerfile: docker/php/Dockerfile
working_dir: /var/www/html
volumes:
- .:/var/www/html
depends_on:
- db
nginx:
image: nginx:alpine
ports:
- "${APP_PORT}:8000"
volumes:
- .:/var/www/html
- ./docker/nginx/default.conf:/etc/nginx/conf.d/default.conf
depends_on:
- php
db:
image: postgres:16
environment:
POSTGRES_DB: app
POSTGRES_USER: app
POSTGRES_PASSWORD: app
volumes:
- template_db_data:/var/lib/postgresql/data
nuxt:
build:
context: ./frontend
dockerfile: Dockerfile
working_dir: /app
ports:
- "${NUXT_PORT:-3001}:3000"
volumes:
- ./frontend:/app
- /app/node_modules
volumes:
template_db_data:
+22
View File
@@ -0,0 +1,22 @@
# Nginx vhost for the Symfony app: serves public/, routes every request
# through the front controller, and proxies PHP to the php-fpm service.
server {
listen 80;
root /var/www/html/public;
index index.php;
# Serve the file if it exists, otherwise fall back to the front controller.
location / {
try_files $uri /index.php$is_args$args;
}
# Only index.php is executed; "internal" forbids direct external requests
# to this location (it is only reachable via the try_files fallback).
location ~ ^/index\.php(/|$) {
fastcgi_pass php:9000;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
internal;
}
# Any other .php file returns 404, preventing arbitrary script execution.
location ~ \.php$ {
return 404;
}
}
+12
View File
@@ -0,0 +1,12 @@
# PHP-FPM runtime image for the Symfony backend.
FROM php:8.3-fpm

# System packages for Composer (git/unzip/curl) and the Postgres driver
# build (libpq-dev). --no-install-recommends plus the apt-list cleanup keep
# the layer small; without the cleanup the package indexes stay baked in.
RUN apt-get update && apt-get install -y --no-install-recommends \
    git \
    unzip \
    curl \
    libpq-dev \
    && docker-php-ext-install pdo pdo_pgsql \
    && rm -rf /var/lib/apt/lists/*

# Composer binary taken from the official image (multi-stage copy).
COPY --from=composer:2 /usr/bin/composer /usr/bin/composer

WORKDIR /var/www/html
+3
View File
@@ -0,0 +1,3 @@
import createConfigForNuxt from "@nuxt/eslint-config"
export default createConfigForNuxt()
+2
View File
@@ -0,0 +1,2 @@
# Variables frontend optionnelles
# Copier vers frontend/.env si besoin
+24
View File
@@ -0,0 +1,24 @@
# Nuxt dev/build outputs
.output
.data
.nuxt
.nitro
.cache
dist
# Node dependencies
node_modules
# Logs
logs
*.log
# Misc
.DS_Store
.fleet
.idea
# Local env files
.env
.env.*
!.env.example
+13
View File
@@ -0,0 +1,13 @@
# Nuxt dev-server image for the frontend.
FROM node:20
WORKDIR /app
# Copy the manifests first so the npm install layer stays cached until
# package*.json changes.
COPY package*.json ./
RUN npm install
COPY . .
EXPOSE 3000
# Bind to 0.0.0.0 so the dev server is reachable from outside the container.
CMD ["npm","run","dev","--","--host","0.0.0.0"]
+75
View File
@@ -0,0 +1,75 @@
# Nuxt Minimal Starter
Look at the [Nuxt documentation](https://nuxt.com/docs/getting-started/introduction) to learn more.
## Setup
Make sure to install dependencies:
```bash
# npm
npm install
# pnpm
pnpm install
# yarn
yarn install
# bun
bun install
```
## Development Server
Start the development server on `http://localhost:3000`:
```bash
# npm
npm run dev
# pnpm
pnpm dev
# yarn
yarn dev
# bun
bun run dev
```
## Production
Build the application for production:
```bash
# npm
npm run build
# pnpm
pnpm build
# yarn
yarn build
# bun
bun run build
```
Locally preview production build:
```bash
# npm
npm run preview
# pnpm
pnpm preview
# yarn
yarn preview
# bun
bun run preview
```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.
+6
View File
@@ -0,0 +1,6 @@
<template>
<div>
<NuxtRouteAnnouncer />
<NuxtWelcome />
</div>
</template>
+5
View File
@@ -0,0 +1,5 @@
// https://nuxt.com/docs/api/configuration/nuxt-config
export default defineNuxtConfig({
compatibilityDate: '2025-07-15',
devtools: { enabled: true }
})
+9715
View File
File diff suppressed because it is too large Load Diff
+17
View File
@@ -0,0 +1,17 @@
{
"name": "frontend",
"type": "module",
"private": true,
"scripts": {
"build": "nuxt build",
"dev": "nuxt dev",
"generate": "nuxt generate",
"preview": "nuxt preview",
"postinstall": "nuxt prepare"
},
"dependencies": {
"nuxt": "^4.3.1",
"vue": "^3.5.30",
"vue-router": "^4.6.4"
}
}
Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 KiB

+2
View File
@@ -0,0 +1,2 @@
User-Agent: *
Disallow:
+18
View File
@@ -0,0 +1,18 @@
{
// https://nuxt.com/docs/guide/concepts/typescript
"files": [],
"references": [
{
"path": "./.nuxt/tsconfig.app.json"
},
{
"path": "./.nuxt/tsconfig.server.json"
},
{
"path": "./.nuxt/tsconfig.shared.json"
},
{
"path": "./.nuxt/tsconfig.node.json"
}
]
}
View File
+31
View File
@@ -0,0 +1,31 @@
<?php
declare(strict_types=1);
namespace DoctrineMigrations;
use Doctrine\DBAL\Schema\Schema;
use Doctrine\Migrations\AbstractMigration;
/**
 * Auto-generated Migration: Please modify to your needs!
 *
 * Creates the "user" table (identity id + name). The table name is quoted
 * because "user" is a reserved word in PostgreSQL.
 */
final class Version20260311111210 extends AbstractMigration
{
public function getDescription(): string
{
return '';
}
// Forward migration: create the "user" table.
public function up(Schema $schema): void
{
// this up() migration is auto-generated, please modify it to your needs
$this->addSql('CREATE TABLE "user" (id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL, name VARCHAR(255) NOT NULL, PRIMARY KEY (id))');
}
// Rollback: drop the table created by up().
public function down(Schema $schema): void
{
// this down() migration is auto-generated, please modify it to your needs
$this->addSql('DROP TABLE "user"');
}
}
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../acorn/bin/acorn
+1
View File
@@ -0,0 +1 @@
../baseline-browser-mapping/dist/cli.cjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../browserslist/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../@eslint/config-inspector/bin.mjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../cssesc/bin/cssesc
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../esbuild/bin/esbuild
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../eslint/bin/eslint.js
+1
View File
@@ -0,0 +1 @@
../@eslint/config-inspector/bin.mjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../giget/dist/cli.mjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../jiti/lib/jiti-cli.mjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../jsesc/bin/jsesc
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../napi-postinstall/lib/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../which/bin/node-which
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../nypm/dist/cli.mjs
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../regexp-tree/bin/regexp-tree
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../regjsparser/bin/parser
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../rollup/dist/bin/rollup
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../semver/bin/semver.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../typescript/bin/tsc
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../typescript/bin/tsserver
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../untyped/dist/cli.mjs
+1
View File
@@ -0,0 +1 @@
../update-browserslist-db/cli.js
Generated Vendored Symlink
+1
View File
@@ -0,0 +1 @@
../vite/bin/vite.js
+3964
View File
File diff suppressed because it is too large Load Diff
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Anthony Fu <https://github.com/antfu>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+27
View File
@@ -0,0 +1,27 @@
# install-pkg
[![NPM version](https://img.shields.io/npm/v/@antfu/install-pkg?color=a1b858&label=)](https://www.npmjs.com/package/@antfu/install-pkg)
Install package programmatically. Detect package managers automatically (`npm`, `yarn`, `bun` and `pnpm`).
```bash
npm i @antfu/install-pkg
```
```ts
import { installPackage } from '@antfu/install-pkg'
await installPackage('vite', { silent: true })
```
## Sponsors
<p align="center">
<a href="https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg">
<img src='https://cdn.jsdelivr.net/gh/antfu/static/sponsors.svg'/>
</a>
</p>
## License
[MIT](./LICENSE) License © 2021 [Anthony Fu](https://github.com/antfu)
+135
View File
@@ -0,0 +1,135 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
var index_exports = {};
__export(index_exports, {
detectPackageManager: () => detectPackageManager,
installPackage: () => installPackage,
uninstallPackage: () => uninstallPackage
});
module.exports = __toCommonJS(index_exports);
// src/detect.ts
var import_node_process = __toESM(require("process"), 1);
var import_detect = require("package-manager-detector/detect");
async function detectPackageManager(cwd = import_node_process.default.cwd()) {
const result = await (0, import_detect.detect)({
cwd,
onUnknown(packageManager) {
console.warn("[@antfu/install-pkg] Unknown packageManager:", packageManager);
return void 0;
}
});
return result?.agent || null;
}
// src/install.ts
var import_node_fs = require("fs");
var import_node_path = require("path");
var import_node_process2 = __toESM(require("process"), 1);
var import_tinyexec = require("tinyexec");
/**
 * Install one or more packages with the detected (or explicitly provided)
 * package manager.
 *
 * @param names - A package name, or an array of package names, to install.
 * @param options - cwd, dev, silent, packageManager, preferOffline, and
 *   additionalArgs (an array, or a factory `(agent, detectedAgent) => args`).
 * @returns The result of spawning the package-manager command via tinyexec.
 */
async function installPackage(names, options = {}) {
  const detectedAgent = options.packageManager || await detectPackageManager(options.cwd) || "npm";
  // `agent` is the bare CLI name; `detectedAgent` may carry a variant suffix
  // such as "yarn@berry".
  const [agent] = detectedAgent.split("@");
  const packages = Array.isArray(names) ? names : [names];
  const resolvedArgs = typeof options.additionalArgs === "function" ? options.additionalArgs(agent, detectedAgent) : options.additionalArgs;
  // Fix: copy into a fresh array. Previously the flags below were unshifted
  // directly into the caller-supplied `additionalArgs` array, so reusing the
  // same options object across calls accumulated duplicate flags
  // (e.g. "--prod=false" or "--prefer-offline" added once per call).
  const args = [...resolvedArgs || []];
  if (options.preferOffline) {
    // yarn berry uses "--cached"; the other agents accept "--prefer-offline"
    if (detectedAgent === "yarn@berry")
      args.unshift("--cached");
    else
      args.unshift("--prefer-offline");
  }
  if (agent === "pnpm") {
    args.unshift(
      /**
       * Prevent pnpm from removing installed devDeps while `NODE_ENV` is `production`
       * @see https://pnpm.io/cli/install#--prod--p
       */
      "--prod=false"
    );
    if ((0, import_node_fs.existsSync)((0, import_node_path.resolve)(options.cwd ?? import_node_process2.default.cwd(), "pnpm-workspace.yaml"))) {
      // Inside a pnpm workspace, install at the workspace root
      args.unshift("-w");
    }
  }
  return (0, import_tinyexec.x)(
    agent,
    [
      agent === "yarn" ? "add" : "install",
      options.dev ? "-D" : "",
      ...args,
      ...packages
    ].filter(Boolean),
    {
      nodeOptions: {
        stdio: options.silent ? "ignore" : "inherit",
        cwd: options.cwd
      },
      throwOnError: true
    }
  );
}
// src/uninstall.ts
var import_node_fs2 = require("fs");
var import_node_process3 = __toESM(require("process"), 1);
var import_node_path2 = require("path");
var import_tinyexec2 = require("tinyexec");
/**
 * Uninstall one or more packages with the detected (or explicitly provided)
 * package manager.
 *
 * @param names - A package name, or an array of package names, to remove.
 * @param options - cwd, dev, silent, packageManager, additionalArgs.
 * @returns The result of spawning the package-manager command via tinyexec.
 */
async function uninstallPackage(names, options = {}) {
  const detectedAgent = options.packageManager || await detectPackageManager(options.cwd) || "npm";
  // Strip a variant suffix such as "yarn@berry" down to the CLI name.
  const [agent] = detectedAgent.split("@");
  const packages = Array.isArray(names) ? names : [names];
  // Fix: copy to avoid mutating a caller-supplied `additionalArgs` array
  // ("-w" would otherwise accumulate across repeated calls that share one
  // options object).
  const args = [...options.additionalArgs || []];
  if (agent === "pnpm" && (0, import_node_fs2.existsSync)((0, import_node_path2.resolve)(options.cwd ?? import_node_process3.default.cwd(), "pnpm-workspace.yaml")))
    args.unshift("-w");
  return (0, import_tinyexec2.x)(
    agent,
    [
      agent === "yarn" ? "remove" : "uninstall",
      options.dev ? "-D" : "",
      ...args,
      ...packages
    ].filter(Boolean),
    {
      nodeOptions: {
        stdio: options.silent ? "ignore" : "inherit",
        cwd: options.cwd
      },
      throwOnError: true
    }
  );
}
// Annotate the CommonJS export names for ESM import in node:
// (`0 &&` makes this dead code at runtime; it exists only so Node's static
// analysis of the CJS file can discover the named exports.)
0 && (module.exports = {
  detectPackageManager,
  installPackage,
  uninstallPackage
});
+27
View File
@@ -0,0 +1,27 @@
import { Agent } from 'package-manager-detector';
export { Agent } from 'package-manager-detector';
import * as tinyexec from 'tinyexec';
/** The package-manager CLIs this library can drive. */
type PackageManager = 'pnpm' | 'yarn' | 'npm' | 'bun';
/**
 * Detect the package manager agent for `cwd` (defaults to the process cwd).
 * Resolves to `null` when no agent could be determined.
 */
declare function detectPackageManager(cwd?: string): Promise<Agent | null>;
/** Options accepted by {@link installPackage}. */
interface InstallPackageOptions {
/** Working directory used for detection and for running the command. */
cwd?: string;
/** Install as a dev dependency (`-D`). */
dev?: boolean;
/** Suppress the child process's output. */
silent?: boolean;
/** Force a specific package manager (e.g. "npm", "yarn@berry"). */
packageManager?: string;
/** Prefer cached/offline installation where the agent supports it. */
preferOffline?: boolean;
/** Extra CLI arguments, or a factory producing them from the agent. */
additionalArgs?: string[] | ((agent: string, detectedAgent: string) => string[] | undefined);
}
/** Install one or more packages with the detected (or given) package manager. */
declare function installPackage(names: string | string[], options?: InstallPackageOptions): Promise<tinyexec.Output>;
/** Options accepted by {@link uninstallPackage}. */
interface UninstallPackageOptions {
/** Working directory used for detection and for running the command. */
cwd?: string;
/** Remove from devDependencies (`-D`). */
dev?: boolean;
/** Suppress the child process's output. */
silent?: boolean;
/** Force a specific package manager. */
packageManager?: string;
/** Extra CLI arguments passed through to the agent. */
additionalArgs?: string[];
}
/** Uninstall one or more packages with the detected (or given) package manager. */
declare function uninstallPackage(names: string | string[], options?: UninstallPackageOptions): Promise<tinyexec.Output>;
export { type InstallPackageOptions, type PackageManager, type UninstallPackageOptions, detectPackageManager, installPackage, uninstallPackage };
+27
View File
@@ -0,0 +1,27 @@
import { Agent } from 'package-manager-detector';
export { Agent } from 'package-manager-detector';
import * as tinyexec from 'tinyexec';
/** The package-manager CLIs this library can drive. */
type PackageManager = 'pnpm' | 'yarn' | 'npm' | 'bun';
/**
 * Detect the package manager agent for `cwd` (defaults to the process cwd).
 * Resolves to `null` when no agent could be determined.
 */
declare function detectPackageManager(cwd?: string): Promise<Agent | null>;
/** Options accepted by {@link installPackage}. */
interface InstallPackageOptions {
/** Working directory used for detection and for running the command. */
cwd?: string;
/** Install as a dev dependency (`-D`). */
dev?: boolean;
/** Suppress the child process's output. */
silent?: boolean;
/** Force a specific package manager (e.g. "npm", "yarn@berry"). */
packageManager?: string;
/** Prefer cached/offline installation where the agent supports it. */
preferOffline?: boolean;
/** Extra CLI arguments, or a factory producing them from the agent. */
additionalArgs?: string[] | ((agent: string, detectedAgent: string) => string[] | undefined);
}
/** Install one or more packages with the detected (or given) package manager. */
declare function installPackage(names: string | string[], options?: InstallPackageOptions): Promise<tinyexec.Output>;
/** Options accepted by {@link uninstallPackage}. */
interface UninstallPackageOptions {
/** Working directory used for detection and for running the command. */
cwd?: string;
/** Remove from devDependencies (`-D`). */
dev?: boolean;
/** Suppress the child process's output. */
silent?: boolean;
/** Force a specific package manager. */
packageManager?: string;
/** Extra CLI arguments passed through to the agent. */
additionalArgs?: string[];
}
/** Uninstall one or more packages with the detected (or given) package manager. */
declare function uninstallPackage(names: string | string[], options?: UninstallPackageOptions): Promise<tinyexec.Output>;
export { type InstallPackageOptions, type PackageManager, type UninstallPackageOptions, detectPackageManager, installPackage, uninstallPackage };
+96
View File
@@ -0,0 +1,96 @@
// src/detect.ts
import process from "node:process";
import { detect } from "package-manager-detector/detect";
/**
 * Detect the package manager ("agent") that manages `cwd`.
 *
 * Delegates to `package-manager-detector`; an unrecognized `packageManager`
 * field produces a console warning and counts as "not detected".
 *
 * @param cwd - Directory to inspect (defaults to the current working directory).
 * @returns The detected agent string, or `null` when none was determined.
 */
async function detectPackageManager(cwd = process.cwd()) {
  const handleUnknown = (packageManager) => {
    console.warn("[@antfu/install-pkg] Unknown packageManager:", packageManager);
    return void 0;
  };
  const detection = await detect({ cwd, onUnknown: handleUnknown });
  return detection?.agent || null;
}
// src/install.ts
import { existsSync } from "node:fs";
import { resolve } from "node:path";
import process2 from "node:process";
import { x } from "tinyexec";
/**
 * Install one or more packages with the detected (or explicitly provided)
 * package manager.
 *
 * @param names - A package name, or an array of package names, to install.
 * @param options - cwd, dev, silent, packageManager, preferOffline, and
 *   additionalArgs (an array, or a factory `(agent, detectedAgent) => args`).
 * @returns The result of spawning the package-manager command via tinyexec.
 */
async function installPackage(names, options = {}) {
  const detectedAgent = options.packageManager || await detectPackageManager(options.cwd) || "npm";
  // `agent` is the bare CLI name; `detectedAgent` may carry a variant suffix
  // such as "yarn@berry".
  const [agent] = detectedAgent.split("@");
  const packages = Array.isArray(names) ? names : [names];
  const resolvedArgs = typeof options.additionalArgs === "function" ? options.additionalArgs(agent, detectedAgent) : options.additionalArgs;
  // Fix: copy into a fresh array. Previously the flags below were unshifted
  // directly into the caller-supplied `additionalArgs` array, so reusing the
  // same options object across calls accumulated duplicate flags
  // (e.g. "--prod=false" or "--prefer-offline" added once per call).
  const args = [...resolvedArgs || []];
  if (options.preferOffline) {
    // yarn berry uses "--cached"; the other agents accept "--prefer-offline"
    if (detectedAgent === "yarn@berry")
      args.unshift("--cached");
    else
      args.unshift("--prefer-offline");
  }
  if (agent === "pnpm") {
    args.unshift(
      /**
       * Prevent pnpm from removing installed devDeps while `NODE_ENV` is `production`
       * @see https://pnpm.io/cli/install#--prod--p
       */
      "--prod=false"
    );
    if (existsSync(resolve(options.cwd ?? process2.cwd(), "pnpm-workspace.yaml"))) {
      // Inside a pnpm workspace, install at the workspace root
      args.unshift("-w");
    }
  }
  return x(
    agent,
    [
      agent === "yarn" ? "add" : "install",
      options.dev ? "-D" : "",
      ...args,
      ...packages
    ].filter(Boolean),
    {
      nodeOptions: {
        stdio: options.silent ? "ignore" : "inherit",
        cwd: options.cwd
      },
      throwOnError: true
    }
  );
}
// src/uninstall.ts
import { existsSync as existsSync2 } from "node:fs";
import process3 from "node:process";
import { resolve as resolve2 } from "node:path";
import { x as x2 } from "tinyexec";
async function uninstallPackage(names, options = {}) {
const detectedAgent = options.packageManager || await detectPackageManager(options.cwd) || "npm";
const [agent] = detectedAgent.split("@");
if (!Array.isArray(names))
names = [names];
const args = options.additionalArgs || [];
if (agent === "pnpm" && existsSync2(resolve2(options.cwd ?? process3.cwd(), "pnpm-workspace.yaml")))
args.unshift("-w");
return x2(
agent,
[
agent === "yarn" ? "remove" : "uninstall",
options.dev ? "-D" : "",
...args,
...names
].filter(Boolean),
{
nodeOptions: {
stdio: options.silent ? "ignore" : "inherit",
cwd: options.cwd
},
throwOnError: true
}
);
}
export {
detectPackageManager,
installPackage,
uninstallPackage
};
+58
View File
@@ -0,0 +1,58 @@
{
"name": "@antfu/install-pkg",
"type": "module",
"version": "1.1.0",
"description": "Install package programmatically.",
"author": "Anthony Fu <anthonyfu117@hotmail.com>",
"license": "MIT",
"funding": "https://github.com/sponsors/antfu",
"homepage": "https://github.com/antfu/install-pkg#readme",
"repository": {
"type": "git",
"url": "git+https://github.com/antfu/install-pkg.git"
},
"bugs": {
"url": "https://github.com/antfu/install-pkg/issues"
},
"sideEffects": false,
"exports": {
".": {
"import": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"require": {
"types": "./dist/index.d.cts",
"default": "./dist/index.cjs"
}
}
},
"main": "dist/index.cjs",
"module": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
],
"dependencies": {
"package-manager-detector": "^1.3.0",
"tinyexec": "^1.0.1"
},
"devDependencies": {
"@antfu/eslint-config": "^4.12.1",
"@antfu/ni": "^24.3.0",
"@types/node": "^22.15.12",
"bumpp": "^10.1.0",
"eslint": "^9.26.0",
"publint": "^0.3.12",
"tsup": "^8.4.0",
"tsx": "^4.19.4",
"typescript": "^5.8.3"
},
"scripts": {
"dev": "nr build --watch",
"start": "tsx src/index.ts",
"build": "tsup src/index.ts --format cjs,esm --dts --no-splitting",
"release": "bumpp --commit --push --tag && pnpm publish",
"lint": "eslint ."
}
}
+21
View File
@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 James Messinger
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
+164
View File
@@ -0,0 +1,164 @@
# JSON Schema $Ref Parser
#### Parse, Resolve, and Dereference JSON Schema $ref pointers
[![Build Status](https://github.com/APIDevTools/json-schema-ref-parser/workflows/CI-CD/badge.svg?branch=master)](https://github.com/APIDevTools/json-schema-ref-parser/actions)
[![Coverage Status](https://coveralls.io/repos/github/APIDevTools/json-schema-ref-parser/badge.svg?branch=master)](https://coveralls.io/github/APIDevTools/json-schema-ref-parser)
[![npm](https://img.shields.io/npm/v/@apidevtools/json-schema-ref-parser.svg)](https://www.npmjs.com/package/@apidevtools/json-schema-ref-parser)
[![License](https://img.shields.io/npm/l/@apidevtools/json-schema-ref-parser.svg)](LICENSE)
[![Buy us a tree](https://img.shields.io/badge/Treeware-%F0%9F%8C%B3-lightgreen)](https://plant.treeware.earth/APIDevTools/json-schema-ref-parser)
## Installation
Install using [npm](https://docs.npmjs.com/about-npm/), yarn, or bun:
```bash
npm install @apidevtools/json-schema-ref-parser
yarn add @apidevtools/json-schema-ref-parser
bun add @apidevtools/json-schema-ref-parser
```
## The Problem:
You've got a JSON Schema with `$ref` pointers to other files and/or URLs. Maybe you know all the referenced files ahead
of time. Maybe you don't. Maybe some are local files, and others are remote URLs. Maybe they are a mix of JSON and YAML
format. Maybe some of the files contain cross-references to each other.
```json
{
"definitions": {
"person": {
// references an external file
"$ref": "schemas/people/Bruce-Wayne.json"
},
"place": {
// references a sub-schema in an external file
"$ref": "schemas/places.yaml#/definitions/Gotham-City"
},
"thing": {
// references a URL
"$ref": "http://wayne-enterprises.com/things/batmobile"
},
"color": {
// references a value in an external file via an internal reference
"$ref": "#/definitions/thing/properties/colors/black-as-the-night"
}
}
}
```
## The Solution:
JSON Schema $Ref Parser is a full [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03)
and [JSON Pointer](https://tools.ietf.org/html/rfc6901) implementation that crawls even the most
complex [JSON Schemas](http://json-schema.org/latest/json-schema-core.html) and gives you simple, straightforward
JavaScript objects.
- Use **JSON** or **YAML** schemas &mdash; or even a mix of both!
- Supports `$ref` pointers to external files and URLs, as well
as [custom sources](https://apidevtools.com/json-schema-ref-parser/docs/plugins/resolvers.html) such as databases
- Can [bundle](https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#bundlepath-options-callback) multiple
files into a single schema that only has _internal_ `$ref` pointers
- Can [dereference](https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#dereferencepath-options-callback)
your schema, producing a plain-old JavaScript object that's easy to work with
- Supports [circular references](https://apidevtools.com/json-schema-ref-parser/docs/#circular-refs), nested references,
back-references, and cross-references between files
- Maintains object reference equality &mdash; `$ref` pointers to the same value always resolve to the same object
instance
- Compatible with Node LTS and beyond, and all major web browsers on Windows, Mac, and Linux
## Example
```javascript
import $RefParser from "@apidevtools/json-schema-ref-parser";
try {
await $RefParser.dereference(mySchema);
// note - by default, mySchema is modified in place, and the returned value is a reference to the same object
console.log(mySchema.definitions.person.properties.firstName);
// if you want to avoid modifying the original schema, you can disable the `mutateInputSchema` option
let clonedSchema = await $RefParser.dereference(mySchema, { mutateInputSchema: false });
console.log(clonedSchema.definitions.person.properties.firstName);
} catch (err) {
console.error(err);
}
```
For more detailed examples, please see the [API Documentation](https://apidevtools.com/json-schema-ref-parser/docs/)
## Polyfills
If you are using Node.js < 18, you'll need a polyfill for `fetch`,
like [node-fetch](https://github.com/node-fetch/node-fetch):
```javascript
import fetch from "node-fetch";
globalThis.fetch = fetch;
```
## Browser support
JSON Schema $Ref Parser supports recent versions of every major web browser. Older browsers may
require [Babel](https://babeljs.io/) and/or [polyfills](https://babeljs.io/docs/en/next/babel-polyfill).
To use JSON Schema $Ref Parser in a browser, you'll need to use a bundling tool such
as [Webpack](https://webpack.js.org/), [Rollup](https://rollupjs.org/), [Parcel](https://parceljs.org/),
or [Browserify](http://browserify.org/). Some bundlers may require a bit of configuration, such as
setting `browser: true` in [rollup-plugin-resolve](https://github.com/rollup/rollup-plugin-node-resolve).
#### Webpack 5
Webpack 5 has dropped the default export of node core modules in favour of polyfills, you'll need to set them up
yourself ( after npm-installing them )
Edit your `webpack.config.js` :
```js
config.resolve.fallback = {
path: require.resolve("path-browserify"),
fs: require.resolve("browserify-fs"),
};
config.plugins.push(
new webpack.ProvidePlugin({
Buffer: ["buffer", "Buffer"],
}),
);
```
## API Documentation
Full API documentation is available [right here](https://apidevtools.com/json-schema-ref-parser/docs/)
## Contributing
I welcome any contributions, enhancements, and
bug-fixes. [Open an issue](https://github.com/APIDevTools/json-schema-ref-parser/issues) on GitHub
and [submit a pull request](https://github.com/APIDevTools/json-schema-ref-parser/pulls).
#### Building/Testing
To build/test the project locally on your computer:
1. **Clone this repo**<br>
`git clone https://github.com/APIDevTools/json-schema-ref-parser.git`
2. **Install dependencies**<br>
`yarn install`
3. **Run the tests**<br>
`yarn test`
## License
JSON Schema $Ref Parser is 100% free and open-source, under the [MIT license](LICENSE). Use it however you want.
## Thanks
Thanks to these awesome contributors for their major support of this open-source project.
- [JonLuca De Caro](https://jonlu.ca)
- [Phil Sturgeon](https://philsturgeon.com)
- [Stoplight](https://stoplight.io/?utm_source=github&utm_medium=readme&utm_campaign=json_schema_ref_parser)
+27
View File
@@ -0,0 +1,27 @@
import type $RefParser from "./index";
import type { ParserOptions } from "./index";
import type { JSONSchema } from "./index";
/**
 * Bookkeeping recorded for a single `$ref` pointer found while crawling the
 * schema. Field meanings mirror the entries pushed by the bundler's
 * `inventory$Ref` implementation.
 */
export interface InventoryEntry {
/** The JSON Reference object itself (e.g. `{ $ref: string }`). */
$ref: any;
/** The object that contains this $ref pointer. */
parent: any;
/** The key in `parent` that is the $ref pointer. */
key: any;
/** The path to the $ref pointer, from the JSON Schema root. */
pathFromRoot: any;
/** How far from the JSON Schema root this $ref pointer is. */
depth: any;
/** The file that the $ref pointer resolves to. */
file: any;
/** The hash within `file` that the $ref pointer resolves to. */
hash: any;
/** The resolved value of the $ref pointer. */
value: any;
/** Whether this $ref pointer is DIRECTLY circular (i.e. references itself). */
circular: any;
/** Whether this $ref extends its resolved value with extra properties. */
extended: any;
/** Whether this $ref points to a file other than the main JSON Schema file. */
external: any;
/** The number of indirect references traversed to resolve the value. */
indirections: any;
}
/**
 * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that
 * only has *internal* references, not any *external* references.
 * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
 *
 * @param parser - The $RefParser whose schema will be bundled in place
 * @param options - Parser options (including bundle-specific hooks)
 */
declare function bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>, options: O): void;
export default bundle;
+302
View File
@@ -0,0 +1,302 @@
"use strict";
// TypeScript-emitted interop helpers (downlevel CommonJS output).
// __createBinding: re-export a member of module `m` on object `o` (as a live
// getter where property descriptors are available, a plain copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attach `v` as the `default` export of namespace `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulate `import * as ns from "mod"` for CommonJS modules —
// copies every own key except "default" and sets `default` to the module.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// __importDefault: emulate `import mod from "mod"` for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ref_js_1 = __importDefault(require("./ref.js"));
const pointer_js_1 = __importDefault(require("./pointer.js"));
const url = __importStar(require("./util/url.js"));
/**
 * Bundles all external JSON references into the main JSON schema, thus resulting in a schema that
 * only has *internal* references, not any *external* references.
 * This method mutates the JSON schema object, adding new references and re-mapping existing ones.
 *
 * @param parser - The $RefParser whose schema will be bundled in place
 * @param options - Parser options forwarded to the crawl and remap phases
 */
function bundle(parser, options) {
    // Phase 1: inventory every $ref pointer reachable from the schema root.
    const refInventory = [];
    crawl(parser, "schema", parser.$refs._root$Ref.path + "#", "#", 0, refInventory, parser.$refs, options);
    // Phase 2: rewrite each inventoried $ref so only internal pointers remain.
    remap(refInventory);
}
/**
 * Recursively crawls the given value, and inventories all JSON references.
 *
 * @param parent - The object containing the value to crawl. If the value is not an object or array, it will be ignored.
 * @param key - The property key of `parent` to be crawled
 * @param path - The full path of the property being crawled, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of the property being crawled, from the schema root
 * @param indirections - Number of indirect reference hops traversed to reach this value
 * @param inventory - An array of already-inventoried $ref pointers
 * @param $refs - The $Refs registry used to resolve references
 * @param options - Parser options; `options.bundle.excludedPathMatcher` can prune subtrees
 */
function crawl(parent, key, path, pathFromRoot, indirections, inventory, $refs, options) {
    const obj = key === null ? parent : parent[key];
    const bundleOptions = (options.bundle || {});
    const isExcludedPath = bundleOptions.excludedPathMatcher || (() => false);
    if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !isExcludedPath(pathFromRoot)) {
        if (ref_js_1.default.isAllowed$Ref(obj)) {
            inventory$Ref(parent, key, path, pathFromRoot, indirections, inventory, $refs, options);
        }
        else {
            // Crawl the object in a specific order that's optimized for bundling.
            // This is important because it determines how `pathFromRoot` gets built,
            // which later determines which keys get dereferenced and which ones get remapped
            const keys = Object.keys(obj).sort((a, b) => {
                // Most people will expect references to be bundled into the "definitions" property,
                // so we always crawl that property first, if it exists.
                if (a === "definitions" || a === "$defs") {
                    return -1;
                }
                else if (b === "definitions" || b === "$defs") {
                    return 1;
                }
                else {
                    // Otherwise, crawl the keys based on their length.
                    // This produces the shortest possible bundled references
                    return a.length - b.length;
                }
            });
            for (const key of keys) {
                const keyPath = pointer_js_1.default.join(path, key);
                const keyPathFromRoot = pointer_js_1.default.join(pathFromRoot, key);
                const value = obj[key];
                if (ref_js_1.default.isAllowed$Ref(value)) {
                    inventory$Ref(obj, key, path, keyPathFromRoot, indirections, inventory, $refs, options);
                }
                else {
                    crawl(obj, key, keyPath, keyPathFromRoot, indirections, inventory, $refs, options);
                }
                // We need to ensure that we have an object to work with here because we may be crawling
                // an `examples` schema and `value` may be nullish.
                if (value && typeof value === "object" && !Array.isArray(value)) {
                    if ("$ref" in value) {
                        // Notify the consumer's onBundle hook for each $ref encountered.
                        bundleOptions?.onBundle?.(value["$ref"], obj[key], obj, key);
                    }
                }
            }
        }
    }
}
/**
 * Inventories the given JSON Reference (i.e. records detailed information about it so we can
 * optimize all $refs in the schema), and then crawls the resolved value.
 *
 * @param $refParent - The object that contains a JSON Reference as one of its keys
 * @param $refKey - The key in `$refParent` that is a JSON Reference
 * @param path - The full path of the JSON Reference at `$refKey`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of the JSON Reference at `$refKey`, from the schema root
 * @param indirections - Count of indirect hops already traversed to reach this reference
 * @param inventory - An array of already-inventoried $ref pointers
 * @param $refs - The $Refs registry used to resolve the reference
 * @param options - Parser options, forwarded to resolution and crawling
 */
function inventory$Ref($refParent, $refKey, path, pathFromRoot, indirections, inventory, $refs, options) {
    const $ref = $refKey === null ? $refParent : $refParent[$refKey];
    const $refPath = url.resolve(path, $ref.$ref);
    const pointer = $refs._resolve($refPath, pathFromRoot, options);
    // Unresolvable reference — nothing to inventory.
    if (pointer === null) {
        return;
    }
    const parsed = pointer_js_1.default.parse(pathFromRoot);
    const depth = parsed.length;
    const file = url.stripHash(pointer.path);
    const hash = url.getHash(pointer.path);
    const external = file !== $refs._root$Ref.path;
    const extended = ref_js_1.default.isExtended$Ref($ref);
    indirections += pointer.indirections;
    const existingEntry = findInInventory(inventory, $refParent, $refKey);
    if (existingEntry) {
        // This $Ref has already been inventoried, so we don't need to process it again
        if (depth < existingEntry.depth || indirections < existingEntry.indirections) {
            // ...unless this occurrence is "better" (shallower or more direct):
            // drop the old entry so the new one below replaces it.
            removeFromInventory(inventory, existingEntry);
        }
        else {
            return;
        }
    }
    inventory.push({
        $ref, // The JSON Reference (e.g. {$ref: string})
        parent: $refParent, // The object that contains this $ref pointer
        key: $refKey, // The key in `parent` that is the $ref pointer
        pathFromRoot, // The path to the $ref pointer, from the JSON Schema root
        depth, // How far from the JSON Schema root is this $ref pointer?
        file, // The file that the $ref pointer resolves to
        hash, // The hash within `file` that the $ref pointer resolves to
        value: pointer.value, // The resolved value of the $ref pointer
        circular: pointer.circular, // Is this $ref pointer DIRECTLY circular? (i.e. it references itself)
        extended, // Does this $ref extend its resolved value? (i.e. it has extra properties, in addition to "$ref")
        external, // Does this $ref pointer point to a file other than the main JSON Schema file?
        indirections, // The number of indirect references that were traversed to resolve the value
    });
    // Recursively crawl the resolved value
    if (!existingEntry || external) {
        crawl(pointer.value, null, pointer.path, pathFromRoot, indirections + 1, inventory, $refs, options);
    }
}
/**
 * Re-maps every $ref pointer, so that they're all relative to the root of the JSON Schema.
 * Each referenced value is dereferenced EXACTLY ONCE. All subsequent references to the same
 * value are re-mapped to point to the first reference.
 *
 * @example: {
 *    first: { $ref: somefile.json#/some/part },
 *    second: { $ref: somefile.json#/another/part },
 *    third: { $ref: somefile.json },
 *    fourth: { $ref: somefile.json#/some/part/sub/part }
 * }
 *
 * In this example, there are four references to the same file, but since the third reference points
 * to the ENTIRE file, that's the only one we need to dereference. The other three can just be
 * remapped to point inside the third one.
 *
 * On the other hand, if the third reference DIDN'T exist, then the first and second would both need
 * to be dereferenced, since they point to different parts of the file. The fourth reference does NOT
 * need to be dereferenced, because it can be remapped to point inside the first one.
 *
 * @param inventory - The inventory of $ref entries built by `crawl`; mutated in place
 */
function remap(inventory) {
    // Group & sort all the $ref pointers, so they're in the order that we need to dereference/remap them
    inventory.sort((a, b) => {
        if (a.file !== b.file) {
            // Group all the $refs that point to the same file
            return a.file < b.file ? -1 : +1;
        }
        else if (a.hash !== b.hash) {
            // Group all the $refs that point to the same part of the file
            return a.hash < b.hash ? -1 : +1;
        }
        else if (a.circular !== b.circular) {
            // If the $ref points to itself, then sort it higher than other $refs that point to this $ref
            return a.circular ? -1 : +1;
        }
        else if (a.extended !== b.extended) {
            // If the $ref extends the resolved value, then sort it lower than other $refs that don't extend the value
            return a.extended ? +1 : -1;
        }
        else if (a.indirections !== b.indirections) {
            // Sort direct references higher than indirect references
            return a.indirections - b.indirections;
        }
        else if (a.depth !== b.depth) {
            // Sort $refs by how close they are to the JSON Schema root
            return a.depth - b.depth;
        }
        else {
            // Determine how far each $ref is from the "definitions" property.
            // Most people will expect references to be bundled into the "definitions" property if possible.
            const aDefinitionsIndex = Math.max(a.pathFromRoot.lastIndexOf("/definitions"), a.pathFromRoot.lastIndexOf("/$defs"));
            const bDefinitionsIndex = Math.max(b.pathFromRoot.lastIndexOf("/definitions"), b.pathFromRoot.lastIndexOf("/$defs"));
            if (aDefinitionsIndex !== bDefinitionsIndex) {
                // Give higher priority to the $ref that's closer to the "definitions" property
                return bDefinitionsIndex - aDefinitionsIndex;
            }
            else {
                // All else is equal, so use the shorter path, which will produce the shortest possible reference
                return a.pathFromRoot.length - b.pathFromRoot.length;
            }
        }
    });
    // file/hash/pathFromRoot track the most recently dereferenced value, so
    // later $refs to the same (or a nested) value can be remapped onto it.
    let file, hash, pathFromRoot;
    for (const entry of inventory) {
        // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
        if (!entry.external) {
            // This $ref already resolves to the main JSON Schema file
            entry.$ref.$ref = entry.hash;
        }
        else if (entry.file === file && entry.hash === hash) {
            // This $ref points to the same value as the previous $ref, so remap it to the same path
            entry.$ref.$ref = pathFromRoot;
        }
        else if (entry.file === file && entry.hash.indexOf(hash + "/") === 0) {
            // This $ref points to a sub-value of the previous $ref, so remap it beneath that path
            entry.$ref.$ref = pointer_js_1.default.join(pathFromRoot, pointer_js_1.default.parse(entry.hash.replace(hash, "#")));
        }
        else {
            // We've moved to a new file or new hash
            file = entry.file;
            hash = entry.hash;
            pathFromRoot = entry.pathFromRoot;
            // This is the first $ref to point to this value, so dereference the value.
            // Any other $refs that point to the same value will point to this $ref instead
            entry.$ref = entry.parent[entry.key] = ref_js_1.default.dereference(entry.$ref, entry.value);
            if (entry.circular) {
                // This $ref points to itself
                entry.$ref.$ref = entry.pathFromRoot;
            }
        }
    }
    // we want to ensure that any $refs that point to another $ref are remapped to point to the final value
    // let hadChange = true;
    // while (hadChange) {
    //   hadChange = false;
    //   for (const entry of inventory) {
    //     if (entry.$ref && typeof entry.$ref === "object" && "$ref" in entry.$ref) {
    //       const resolved = inventory.find((e: InventoryEntry) => e.pathFromRoot === entry.$ref.$ref);
    //       if (resolved) {
    //         const resolvedPointsToAnotherRef =
    //           resolved.$ref && typeof resolved.$ref === "object" && "$ref" in resolved.$ref;
    //         if (resolvedPointsToAnotherRef && entry.$ref.$ref !== resolved.$ref.$ref) {
    //           // console.log('Re-mapping $ref pointer "%s" at %s', entry.$ref.$ref, entry.pathFromRoot);
    //           entry.$ref.$ref = resolved.$ref.$ref;
    //           hadChange = true;
    //         }
    //       }
    //     }
    //   }
    // }
}
/**
 * Look up the inventory entry recorded for the $ref at `$refParent[$refKey]`.
 *
 * @param inventory - All inventoried $ref entries collected so far
 * @param $refParent - The object containing the $ref pointer
 * @param $refKey - The key of the $ref pointer within `$refParent`
 * @returns The matching entry, or `undefined` if none was recorded
 */
function findInInventory(inventory, $refParent, $refKey) {
    return inventory.find((entry) => entry && entry.parent === $refParent && entry.key === $refKey);
}
/**
 * Remove the given entry from the inventory array in place.
 *
 * @param inventory - The inventory array to mutate
 * @param entry - The entry to remove
 */
function removeFromInventory(inventory, entry) {
    inventory.splice(inventory.indexOf(entry), 1);
}
// Expose `bundle` as this module's default export.
exports.default = bundle;
@@ -0,0 +1,12 @@
import type { ParserOptions } from "./options.js";
import type { JSONSchema } from "./types";
import type $RefParser from "./index";
export default dereference;
/**
 * Crawls the JSON schema, finds all JSON references, and dereferences them.
 * This method mutates the JSON schema object, replacing JSON references with their resolved value.
 *
 * @param parser - The $RefParser whose `schema` and `$refs` are updated in place
 * @param options - Parser options (dereference behaviour, timeouts, etc.)
 */
declare function dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(parser: $RefParser<S, O>, options: O): void;
@@ -0,0 +1,289 @@
"use strict";
// TypeScript-emitted interop helpers (downlevel CommonJS output).
// __createBinding: re-export a member of module `m` on object `o` (as a live
// getter where property descriptors are available, a plain copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attach `v` as the `default` export of namespace `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulate `import * as ns from "mod"` for CommonJS modules —
// copies every own key except "default" and sets `default` to the module.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// __importDefault: emulate `import mod from "mod"` for CommonJS modules.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ref_js_1 = __importDefault(require("./ref.js"));
const pointer_js_1 = __importDefault(require("./pointer.js"));
const url = __importStar(require("./util/url.js"));
const errors_1 = require("./util/errors");
// Expose `dereference` as this module's default export.
exports.default = dereference;
/**
 * Crawls the JSON schema, finds all JSON references, and dereferences them.
 * This method mutates the JSON schema object, replacing JSON references with their resolved value.
 *
 * @param parser - The $RefParser whose `schema` and `$refs` are updated in place
 * @param options - Parser options forwarded to the crawl (circular handling, timeouts, etc.)
 */
function dereference(parser, options) {
    // `start` is passed down so the crawl can enforce a dereference timeout.
    const start = Date.now();
    // console.log('Dereferencing $ref pointers in %s', parser.$refs._root$Ref.path);
    // Crawl from the schema root; the result carries the (possibly replaced)
    // root value and whether any circular references were encountered.
    const dereferenced = crawl(parser.schema, parser.$refs._root$Ref.path, "#", new Set(), new Set(), new Map(), parser.$refs, options, start);
    parser.$refs.circular = dereferenced.circular;
    parser.schema = dereferenced.value;
}
/**
 * Recursively crawls the given value, and dereferences any JSON references.
 *
 * @param obj - The value to crawl. If it's not an object or array, it will be ignored.
 * @param path - The full path of `obj`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of `obj` from the schema root
 * @param parents - A Set of the ancestor objects currently on the crawl stack (used to detect cycles)
 * @param processedObjects - A Set of all the objects that have already been processed
 * @param dereferencedCache - A Map of previously dereferenced results, keyed by absolute $ref path
 * @param $refs
 * @param options
 * @param startTime - The time when the dereferencing started
 * @returns - The (possibly replaced) value, plus a flag indicating whether anything at or below it is circular
 */
function crawl(obj, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime) {
    let dereferenced;
    const result = {
        value: obj,
        circular: false,
    };
    // Abort with a TimeoutError if we've exceeded options.timeoutMs.
    checkDereferenceTimeout(startTime, options);
    const derefOptions = (options.dereference || {});
    const isExcludedPath = derefOptions.excludedPathMatcher || (() => false);
    // Skip objects we've already processed — unless circular refs are "ignore"d,
    // in which case already-seen objects must be re-visited.
    if (derefOptions?.circular === "ignore" || !processedObjects.has(obj)) {
        // Only crawl real objects/arrays; typed arrays and excluded paths are left untouched.
        if (obj && typeof obj === "object" && !ArrayBuffer.isView(obj) && !isExcludedPath(pathFromRoot)) {
            // Push onto the crawl stack so descendants can detect circular references back to us.
            parents.add(obj);
            processedObjects.add(obj);
            if (ref_js_1.default.isAllowed$Ref(obj, options)) {
                // The object itself is a $ref — dereference it directly.
                dereferenced = dereference$Ref(obj, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
                result.circular = dereferenced.circular;
                result.value = dereferenced.value;
            }
            else {
                // Otherwise crawl each property, dereferencing nested $refs as we go.
                for (const key of Object.keys(obj)) {
                    checkDereferenceTimeout(startTime, options);
                    const keyPath = pointer_js_1.default.join(path, key);
                    const keyPathFromRoot = pointer_js_1.default.join(pathFromRoot, key);
                    if (isExcludedPath(keyPathFromRoot)) {
                        continue;
                    }
                    const value = obj[key];
                    let circular = false;
                    if (ref_js_1.default.isAllowed$Ref(value, options)) {
                        dereferenced = dereference$Ref(value, keyPath, keyPathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
                        circular = dereferenced.circular;
                        // Avoid pointless mutations; breaks frozen objects to no profit
                        if (obj[key] !== dereferenced.value) {
                            // If we have properties we want to preserve from our dereferenced schema then we need
                            // to copy them over to our new object.
                            const preserved = new Map();
                            if (derefOptions?.preservedProperties) {
                                if (typeof obj[key] === "object" && !Array.isArray(obj[key])) {
                                    derefOptions?.preservedProperties.forEach((prop) => {
                                        if (prop in obj[key]) {
                                            preserved.set(prop, obj[key][prop]);
                                        }
                                    });
                                }
                            }
                            obj[key] = dereferenced.value;
                            // If we have data to preserve and our dereferenced object is still an object then
                            // we need to copy back our preserved data into our dereferenced schema.
                            if (derefOptions?.preservedProperties) {
                                if (preserved.size && typeof obj[key] === "object" && !Array.isArray(obj[key])) {
                                    preserved.forEach((value, prop) => {
                                        obj[key][prop] = value;
                                    });
                                }
                            }
                            // Notify the caller that this $ref was replaced with its resolved value.
                            derefOptions?.onDereference?.(value.$ref, obj[key], obj, key);
                        }
                    }
                    else {
                        if (!parents.has(value)) {
                            // Not a $ref and not an ancestor — recurse into it.
                            dereferenced = crawl(value, keyPath, keyPathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
                            circular = dereferenced.circular;
                            // Avoid pointless mutations; breaks frozen objects to no profit
                            if (obj[key] !== dereferenced.value) {
                                obj[key] = dereferenced.value;
                            }
                        }
                        else {
                            // `value` is one of its own ancestors — record the circular reference.
                            circular = foundCircularReference(keyPath, $refs, options);
                        }
                    }
                    // Set the "isCircular" flag if this or any other property is circular
                    result.circular = result.circular || circular;
                }
            }
            // Done with this subtree — pop it off the crawl stack.
            parents.delete(obj);
        }
    }
    return result;
}
/**
 * Dereferences the given JSON Reference, and then crawls the resulting value.
 *
 * @param $ref - The JSON Reference to resolve
 * @param path - The full path of `$ref`, possibly with a JSON Pointer in the hash
 * @param pathFromRoot - The path of `$ref` from the schema root
 * @param parents - A Set of the ancestor objects currently on the crawl stack
 * @param processedObjects - A Set of all the objects that have already been dereferenced
 * @param dereferencedCache - A Map of previously dereferenced results, keyed by absolute $ref path
 * @param $refs
 * @param options
 * @param startTime - The time when the dereferencing started
 * @returns - The dereferenced value and whether it is circular
 */
function dereference$Ref($ref, path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime) {
    const isExternalRef = ref_js_1.default.isExternal$Ref($ref);
    const shouldResolveOnCwd = isExternalRef && options?.dereference?.externalReferenceResolution === "root";
    // Absolute path of the reference target; this doubles as the cache key below.
    const $refPath = url.resolve(shouldResolveOnCwd ? url.cwd() : path, $ref.$ref);
    const cache = dereferencedCache.get($refPath);
    if (cache) {
        // If the object we found is circular we can immediately return it because it would have been
        // cached with everything we need already and we don't need to re-process anything inside it.
        //
        // If the cached object however is _not_ circular and there are additional keys alongside our
        // `$ref` pointer here we should merge them back in and return that.
        if (!cache.circular) {
            const refKeys = Object.keys($ref);
            if (refKeys.length > 1) {
                // Sibling keys next to `$ref` win only when the cached value doesn't already define them.
                const extraKeys = {};
                for (const key of refKeys) {
                    if (key !== "$ref" && !(key in cache.value)) {
                        // @ts-expect-error TS(7053): Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
                        extraKeys[key] = $ref[key];
                    }
                }
                return {
                    circular: cache.circular,
                    value: Object.assign({}, cache.value, extraKeys),
                };
            }
            return cache;
        }
        // If both our cached value and our incoming `$ref` are the same then we can return what we
        // got out of the cache, otherwise we should re-process this value. We need to do this because
        // the current dereference caching mechanism doesn't take into account that `$ref`s are neither
        // unique nor guaranteed to reference the same file.
        //
        // For example if `schema.yaml` references `definitions/child.yaml` and
        // `definitions/parent.yaml` references `child.yaml` then `$ref: 'child.yaml'` may get cached
        // for `definitions/child.yaml`, resulting in `schema.yaml` having an invalid reference
        // to `child.yaml`.
        //
        // This check is not perfect and the design of the dereference caching mechanism needs a total
        // overhaul.
        if (typeof cache.value === "object" && "$ref" in cache.value && "$ref" in $ref) {
            if (cache.value.$ref === $ref.$ref) {
                return cache;
            }
            else {
                // no-op: fall through and re-resolve this $ref from scratch
            }
        }
        else {
            return cache;
        }
    }
    const pointer = $refs._resolve($refPath, path, options);
    // The $ref couldn't be resolved (e.g. the failure was swallowed under continueOnError).
    if (pointer === null) {
        return {
            circular: false,
            value: null,
        };
    }
    // Check for circular references
    const directCircular = pointer.circular;
    let circular = directCircular || parents.has(pointer.value);
    if (circular) {
        foundCircularReference(path, $refs, options);
    }
    // Dereference the JSON reference
    let dereferencedValue = ref_js_1.default.dereference($ref, pointer.value);
    // Crawl the dereferenced value (unless it's circular)
    if (!circular) {
        // Determine if the dereferenced value is circular
        const dereferenced = crawl(dereferencedValue, pointer.path, pathFromRoot, parents, processedObjects, dereferencedCache, $refs, options, startTime);
        circular = dereferenced.circular;
        dereferencedValue = dereferenced.value;
    }
    if (circular && !directCircular && options.dereference?.circular === "ignore") {
        // The user has chosen to "ignore" circular references, so don't change the value
        dereferencedValue = $ref;
    }
    if (directCircular) {
        // The pointer is a DIRECT circular reference (i.e. it references itself).
        // So replace the $ref path with the absolute path from the JSON Schema root
        dereferencedValue.$ref = pathFromRoot;
    }
    const dereferencedObject = {
        circular,
        value: dereferencedValue,
    };
    // only cache if no extra properties than $ref
    if (Object.keys($ref).length === 1) {
        dereferencedCache.set($refPath, dereferencedObject);
    }
    return dereferencedObject;
}
/**
 * Check if we've run past our allowed timeout and throw an error if we have.
 *
 * @param startTime - Epoch milliseconds at which dereferencing started
 * @param options - Parser options; only `options.timeoutMs` is consulted
 * @throws {TimeoutError} when more than `options.timeoutMs` ms have elapsed
 */
function checkDereferenceTimeout(startTime, options) {
    // No options or no timeout configured — nothing to enforce.
    const limit = options && options.timeoutMs;
    if (!limit) {
        return;
    }
    if (Date.now() - startTime > limit) {
        throw new errors_1.TimeoutError(limit);
    }
}
/**
 * Called when a circular reference is found.
 * It sets the {@link $Refs#circular} flag, executes the options.dereference.onCircular callback,
 * and throws an error if options.dereference.circular is falsy.
 *
 * @param keyPath - The JSON Reference path of the circular reference
 * @param $refs - The $Refs instance whose `circular` flag is set
 * @param options - Parser options; `options.dereference.circular` controls whether to throw
 * @returns - always returns true, to indicate that a circular reference was found
 * @throws {ReferenceError} when circular references are not allowed by the options
 */
function foundCircularReference(keyPath, $refs, options) {
    $refs.circular = true;
    options?.dereference?.onCircular?.(keyPath);
    // BUG FIX: the line above tolerates a missing `options.dereference` via optional
    // chaining, but the guard previously read `options.dereference.circular` unguarded,
    // throwing a TypeError instead of the intended ReferenceError. Use optional
    // chaining here too: an absent setting means circular refs are not allowed.
    if (!options?.dereference?.circular) {
        const error = new ReferenceError(`Circular $ref pointer found at ${keyPath}`);
        throw error;
    }
    return true;
}
+163
View File
@@ -0,0 +1,163 @@
import $Refs from "./refs.js";
import normalizeArgs from "./normalize-args.js";
import _dereference from "./dereference.js";
import { JSONParserError, InvalidPointerError, MissingPointerError, ResolverError, ParserError, UnmatchedParserError, UnmatchedResolverError, isHandledError, JSONParserErrorGroup } from "./util/errors.js";
import type { ParserOptions } from "./options.js";
import { getJsonSchemaRefParserDefaultOptions } from "./options.js";
import type { $RefsCallback, JSONSchema, SchemaCallback, FileInfo, Plugin, ResolverOptions, HTTPResolverOptions } from "./types/index.js";
import { isUnsafeUrl } from "./util/url.js";
export type RefParserSchema = string | JSONSchema;
/**
 * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
 * and provides methods for traversing, manipulating, and dereferencing those references.
 *
 * @class
 */
export declare class $RefParser<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
    /**
     * The parsed (and possibly dereferenced) JSON schema object
     *
     * @type {object}
     * @readonly
     */
    schema: S | null;
    /**
     * The resolved JSON references
     *
     * @type {$Refs}
     * @readonly
     */
    $refs: $Refs<S, O>;
    /**
     * Parses the given JSON schema.
     * This method does not resolve any JSON references.
     * It just reads a single file in JSON or YAML format, and parses it as a JavaScript object.
     *
     * @param [path] - The file path or URL of the JSON schema
     * @param [schema] - A JSON schema object. This object will be used instead of reading from `path`.
     * @param [options] - Options that determine how the schema is parsed
     * @param [callback] - An error-first callback. The second parameter is the parsed JSON schema object.
     * @returns - The returned promise resolves with the parsed JSON schema object.
     */
    parse(schema: S | string | unknown): Promise<S>;
    parse(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    parse(schema: S | string | unknown, options: O): Promise<S>;
    parse(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    parse(path: string, schema: S | string | unknown, options: O): Promise<S>;
    parse(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    static parse<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
    static parse<S extends object = JSONSchema>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
    static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O): Promise<S>;
    static parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    /**
     * *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
     *
     * Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
     *
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive a `$Refs` object
     */
    resolve(schema: S | string | unknown): Promise<$Refs<S, O>>;
    resolve(schema: S | string | unknown, callback: $RefsCallback<S, O>): Promise<void>;
    resolve(schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
    resolve(schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
    resolve(path: string, schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
    resolve(path: string, schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
    /**
     * *This method is used internally by other methods, such as `bundle` and `dereference`. You probably won't need to call this method yourself.*
     *
     * Resolves all JSON references (`$ref` pointers) in the given JSON Schema file. If it references any other files/URLs, then they will be downloaded and resolved as well. This method **does not** dereference anything. It simply gives you a `$Refs` object, which is a map of all the resolved references and their values.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#resolveschema-options-callback
     *
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive a `$Refs` object
     */
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown): Promise<$Refs<S, O>>;
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, callback: $RefsCallback<S, O>): Promise<void>;
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O): Promise<$Refs<S, O>>;
    static resolve<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O, callback: $RefsCallback<S, O>): Promise<void>;
    /**
     * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
     *
     * This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
     *
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive the bundled schema object
     */
    static bundle<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
    static bundle<S extends object = JSONSchema>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
    static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O): Promise<S>;
    static bundle<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    /**
     * Bundles all referenced files/URLs into a single schema that only has internal `$ref` pointers. This lets you split-up your schema however you want while you're building it, but easily combine all those files together when it's time to package or distribute the schema to other people. The resulting schema size will be small, since it will still contain internal JSON references rather than being fully-dereferenced.
     *
     * This also eliminates the risk of circular references, so the schema can be safely serialized using `JSON.stringify()`.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#bundleschema-options-callback
     *
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive the bundled schema object
     */
    bundle(schema: S | string | unknown): Promise<S>;
    bundle(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    bundle(schema: S | string | unknown, options: O): Promise<S>;
    bundle(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    bundle(path: string, schema: S | string | unknown, options: O): Promise<S>;
    bundle(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    /**
     * Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
     *
     * The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
     *
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive the dereferenced schema object
     */
    static dereference<S extends object = JSONSchema>(schema: S | string | unknown): Promise<S>;
    static dereference<S extends object = JSONSchema>(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O): Promise<S>;
    static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O): Promise<S>;
    static dereference<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    /**
     * Dereferences all `$ref` pointers in the JSON Schema, replacing each reference with its resolved value. This results in a schema object that does not contain any `$ref` pointers. Instead, it's a normal JavaScript object tree that can easily be crawled and used just like any other JavaScript object. This is great for programmatic usage, especially when using tools that don't understand JSON references.
     *
     * The dereference method maintains object reference equality, meaning that all `$ref` pointers that point to the same object will be replaced with references to the same object. Again, this is great for programmatic usage, but it does introduce the risk of circular references, so be careful if you intend to serialize the schema using `JSON.stringify()`. Consider using the bundle method instead, which does not create circular references.
     *
     * See https://apidevtools.com/json-schema-ref-parser/docs/ref-parser.html#dereferenceschema-options-callback
     *
     * @param path
     * @param schema A JSON Schema object, or the file path or URL of a JSON Schema file. See the `parse` method for more info.
     * @param options (optional)
     * @param callback (optional) A callback that will receive the dereferenced schema object
     */
    dereference(path: string, schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    dereference(schema: S | string | unknown, options: O, callback: SchemaCallback<S>): Promise<void>;
    dereference(schema: S | string | unknown, callback: SchemaCallback<S>): Promise<void>;
    dereference(path: string, schema: S | string | unknown, options: O): Promise<S>;
    dereference(schema: S | string | unknown, options: O): Promise<S>;
    dereference(schema: S | string | unknown): Promise<S>;
}
export default $RefParser;
export declare const parse: typeof $RefParser.parse;
export declare const resolve: typeof $RefParser.resolve;
export declare const bundle: typeof $RefParser.bundle;
export declare const dereference: typeof $RefParser.dereference;
export { UnmatchedResolverError, JSONParserError, JSONSchema, InvalidPointerError, MissingPointerError, ResolverError, ParserError, UnmatchedParserError, ParserOptions, $RefsCallback, isHandledError, JSONParserErrorGroup, SchemaCallback, FileInfo, Plugin, ResolverOptions, HTTPResolverOptions, _dereference as dereferenceInternal, normalizeArgs as jsonSchemaParserNormalizeArgs, getJsonSchemaRefParserDefaultOptions, $Refs, isUnsafeUrl, };
+216
View File
@@ -0,0 +1,216 @@
"use strict";
// --- TypeScript-emitted CommonJS interop helpers (compiler boilerplate) ---
// Re-exports property `k` of module `m` as `k2` on `o`, preserving live getters
// when possible; falls back to a plain copy on engines without Object.create.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches `v` as the `default` export of namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns from "mod"` semantics for CommonJS modules:
// copies every own property onto a fresh namespace object and sets its default.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// Implements `import x from "mod"`: wraps non-ES modules as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isUnsafeUrl = exports.$Refs = exports.getJsonSchemaRefParserDefaultOptions = exports.jsonSchemaParserNormalizeArgs = exports.dereferenceInternal = exports.JSONParserErrorGroup = exports.isHandledError = exports.UnmatchedParserError = exports.ParserError = exports.ResolverError = exports.MissingPointerError = exports.InvalidPointerError = exports.JSONParserError = exports.UnmatchedResolverError = exports.dereference = exports.bundle = exports.resolve = exports.parse = exports.$RefParser = void 0;
const refs_js_1 = __importDefault(require("./refs.js"));
exports.$Refs = refs_js_1.default;
const parse_js_1 = __importDefault(require("./parse.js"));
const normalize_args_js_1 = __importDefault(require("./normalize-args.js"));
exports.jsonSchemaParserNormalizeArgs = normalize_args_js_1.default;
const resolve_external_js_1 = __importDefault(require("./resolve-external.js"));
const bundle_js_1 = __importDefault(require("./bundle.js"));
const dereference_js_1 = __importDefault(require("./dereference.js"));
exports.dereferenceInternal = dereference_js_1.default;
const url = __importStar(require("./util/url.js"));
const errors_js_1 = require("./util/errors.js");
Object.defineProperty(exports, "JSONParserError", { enumerable: true, get: function () { return errors_js_1.JSONParserError; } });
Object.defineProperty(exports, "InvalidPointerError", { enumerable: true, get: function () { return errors_js_1.InvalidPointerError; } });
Object.defineProperty(exports, "MissingPointerError", { enumerable: true, get: function () { return errors_js_1.MissingPointerError; } });
Object.defineProperty(exports, "ResolverError", { enumerable: true, get: function () { return errors_js_1.ResolverError; } });
Object.defineProperty(exports, "ParserError", { enumerable: true, get: function () { return errors_js_1.ParserError; } });
Object.defineProperty(exports, "UnmatchedParserError", { enumerable: true, get: function () { return errors_js_1.UnmatchedParserError; } });
Object.defineProperty(exports, "UnmatchedResolverError", { enumerable: true, get: function () { return errors_js_1.UnmatchedResolverError; } });
Object.defineProperty(exports, "isHandledError", { enumerable: true, get: function () { return errors_js_1.isHandledError; } });
Object.defineProperty(exports, "JSONParserErrorGroup", { enumerable: true, get: function () { return errors_js_1.JSONParserErrorGroup; } });
const maybe_js_1 = __importDefault(require("./util/maybe.js"));
const options_js_1 = require("./options.js");
Object.defineProperty(exports, "getJsonSchemaRefParserDefaultOptions", { enumerable: true, get: function () { return options_js_1.getJsonSchemaRefParserDefaultOptions; } });
const url_js_1 = require("./util/url.js");
Object.defineProperty(exports, "isUnsafeUrl", { enumerable: true, get: function () { return url_js_1.isUnsafeUrl; } });
/**
 * This class parses a JSON schema, builds a map of its JSON references and their resolved values,
 * and provides methods for traversing, manipulating, and dereferencing those references.
 *
 * @class
 */
class $RefParser {
    /**
     * The parsed (and possibly dereferenced) JSON schema object
     *
     * @type {object}
     * @readonly
     */
    schema = null;
    /**
     * The resolved JSON references
     *
     * @type {$Refs}
     * @readonly
     */
    $refs = new refs_js_1.default();
    /**
     * Parses the given JSON schema from a path/URL or an object.
     * Arguments are overloaded — see normalize-args.js for the accepted shapes.
     * This method does not resolve or dereference any $ref pointers.
     */
    async parse() {
        const args = (0, normalize_args_js_1.default)(arguments);
        let promise;
        if (!args.path && !args.schema) {
            const err = new Error(`Expected a file path, URL, or object. Got ${args.path || args.schema}`);
            return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
        }
        // Reset everything
        this.schema = null;
        this.$refs = new refs_js_1.default();
        // If the path is a filesystem path, then convert it to a URL.
        // NOTE: According to the JSON Reference spec, these should already be URLs,
        // but, in practice, many people use local filesystem paths instead.
        // So we're being generous here and doing the conversion automatically.
        // This is not intended to be a 100% bulletproof solution.
        // If it doesn't work for your use-case, then use a URL instead.
        let pathType = "http";
        if (url.isFileSystemPath(args.path)) {
            args.path = url.fromFileSystemPath(args.path);
            pathType = "file";
        }
        else if (!args.path && args.schema && "$id" in args.schema && args.schema.$id) {
            // when the schema's $id defines a URL, use that hostname to request the references,
            // instead of using the current page URL
            const params = url.parse(args.schema.$id);
            const port = params.protocol === "https:" ? 443 : 80;
            args.path = `${params.protocol}//${params.hostname}:${port}`;
        }
        // Resolve the absolute path of the schema
        args.path = url.resolve(url.cwd(), args.path);
        if (args.schema && typeof args.schema === "object") {
            // A schema object was passed-in.
            // So immediately add a new $Ref with the schema object as its value
            const $ref = this.$refs._add(args.path);
            $ref.value = args.schema;
            $ref.pathType = pathType;
            promise = Promise.resolve(args.schema);
        }
        else {
            // Parse the schema file/url
            promise = (0, parse_js_1.default)(args.path, this.$refs, args.options);
        }
        try {
            const result = await promise;
            if (result !== null && typeof result === "object" && !Buffer.isBuffer(result)) {
                this.schema = result;
                return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
            }
            else if (args.options.continueOnError) {
                this.schema = null; // it was already reset to null above, but set it again for the sake of readability
                return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
            }
            else {
                throw new SyntaxError(`"${this.$refs._root$Ref.path || result}" is not a valid JSON Schema`);
            }
        }
        catch (err) {
            // Under continueOnError, handled errors are recorded on the $Ref they came from
            // and the promise resolves with null instead of rejecting.
            if (!args.options.continueOnError || !(0, errors_js_1.isHandledError)(err)) {
                return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
            }
            if (this.$refs._$refs[url.stripHash(args.path)]) {
                this.$refs._$refs[url.stripHash(args.path)].addError(err);
            }
            return (0, maybe_js_1.default)(args.callback, Promise.resolve(null));
        }
    }
    /** Static counterpart of {@link $RefParser#parse}; runs on a fresh parser instance. */
    static parse() {
        const parser = new $RefParser();
        return parser.parse.apply(parser, arguments);
    }
    /**
     * Parses the schema, then resolves all external references, and returns
     * the {@link $Refs} map. Does not dereference anything.
     */
    async resolve() {
        const args = (0, normalize_args_js_1.default)(arguments);
        try {
            await this.parse(args.path, args.schema, args.options);
            await (0, resolve_external_js_1.default)(this, args.options);
            // Throws a JSONParserErrorGroup if any handled errors were collected.
            finalize(this);
            return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.$refs));
        }
        catch (err) {
            return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
        }
    }
    /** Static counterpart of {@link $RefParser#resolve}; runs on a fresh parser instance. */
    static resolve() {
        const instance = new $RefParser();
        return instance.resolve.apply(instance, arguments);
    }
    /** Static counterpart of {@link $RefParser#bundle}; runs on a fresh parser instance. */
    static bundle() {
        const instance = new $RefParser();
        return instance.bundle.apply(instance, arguments);
    }
    /**
     * Resolves the schema, then runs the bundler over the resolved $refs
     * (see bundle.js) and returns the resulting schema.
     */
    async bundle() {
        const args = (0, normalize_args_js_1.default)(arguments);
        try {
            await this.resolve(args.path, args.schema, args.options);
            (0, bundle_js_1.default)(this, args.options);
            finalize(this);
            return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
        }
        catch (err) {
            return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
        }
    }
    /** Static counterpart of {@link $RefParser#dereference}; runs on a fresh parser instance. */
    static dereference() {
        const instance = new $RefParser();
        return instance.dereference.apply(instance, arguments);
    }
    /**
     * Resolves the schema, then replaces every $ref pointer with its resolved
     * value (see dereference.js), mutating `this.schema` in place.
     */
    async dereference() {
        const args = (0, normalize_args_js_1.default)(arguments);
        try {
            await this.resolve(args.path, args.schema, args.options);
            (0, dereference_js_1.default)(this, args.options);
            finalize(this);
            return (0, maybe_js_1.default)(args.callback, Promise.resolve(this.schema));
        }
        catch (err) {
            return (0, maybe_js_1.default)(args.callback, Promise.reject(err));
        }
    }
}
exports.$RefParser = $RefParser;
exports.default = $RefParser;
/**
 * Throws a JSONParserErrorGroup if the parser accumulated any handled
 * errors during parsing/resolving; otherwise does nothing.
 *
 * @param parser - The $RefParser instance to inspect
 */
function finalize(parser) {
    const collected = errors_js_1.JSONParserErrorGroup.getParserErrors(parser);
    if (collected.length) {
        throw new errors_js_1.JSONParserErrorGroup(parser);
    }
}
exports.parse = $RefParser.parse;
exports.resolve = $RefParser.resolve;
exports.bundle = $RefParser.bundle;
exports.dereference = $RefParser.dereference;
@@ -0,0 +1,13 @@
import type { Options, ParserOptions } from "./options.js";
import type { JSONSchema, SchemaCallback } from "./types";
/**
 * The canonical argument shape produced by `normalizeArgs` from the
 * overloaded positional arguments of $RefParser's public methods.
 */
export interface NormalizedArguments<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>> {
    /** The schema's file path or URL (empty string when only a schema object was given) */
    path: string;
    /** The schema object, when one was passed directly */
    schema: S;
    /** The normalized parser options */
    options: O & Options<S>;
    /** The trailing error-first callback, when the caller used callback style */
    callback: SchemaCallback<S>;
}
/**
* Normalizes the given arguments, accounting for optional args.
*/
export declare function normalizeArgs<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(_args: Partial<IArguments>): NormalizedArguments<S, O>;
export default normalizeArgs;
@@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.normalizeArgs = normalizeArgs;
const options_js_1 = require("./options.js");
/**
 * Normalizes the given arguments, accounting for optional args.
 *
 * Supported call shapes (each may also end with a trailing callback):
 *   (path) / (path, options) / (path, schema, options)
 *   (schema) / (schema, options)
 *
 * @param _args - The `arguments` object (or array-like) of the calling method
 * @returns `{ path, schema, options, callback }` in canonical form
 */
function normalizeArgs(_args) {
    let path;
    let schema;
    let options;
    let callback;
    const args = Array.prototype.slice.call(_args);
    if (typeof args[args.length - 1] === "function") {
        // The last parameter is a callback function
        callback = args.pop();
    }
    if (typeof args[0] === "string") {
        // The first parameter is the path
        path = args[0];
        if (typeof args[2] === "object") {
            // The second parameter is the schema, and the third parameter is the options
            schema = args[1];
            options = args[2];
        }
        else {
            // The second parameter is the options
            schema = undefined;
            options = args[1];
        }
    }
    else {
        // The first parameter is the schema
        path = "";
        schema = args[0];
        options = args[1];
    }
    try {
        options = (0, options_js_1.getNewOptions)(options);
    }
    catch (e) {
        // Best-effort: log and fall through with the caller's raw options
        // rather than failing hard during argument normalization.
        console.error(`JSON Schema Ref Parser: Error normalizing options: ${e}`);
    }
    // BUG FIX: if `getNewOptions` threw above, `options` may still be undefined
    // (or the caller's raw value); guard it so we don't crash here with a
    // confusing TypeError instead of honoring the best-effort catch above.
    if (options && !options.mutateInputSchema && typeof schema === "object") {
        // Make a deep clone of the schema, so that we don't alter the original object
        schema = JSON.parse(JSON.stringify(schema));
    }
    return {
        path,
        schema,
        options,
        callback,
    };
}
exports.default = normalizeArgs;
+132
View File
@@ -0,0 +1,132 @@
import type { HTTPResolverOptions, JSONSchema, JSONSchemaObject, Plugin, ResolverOptions } from "./types/index.js";
/** Recursively marks every property of T as optional — the shape of user-supplied option overrides. */
export type DeepPartial<T> = T extends object ? {
    [P in keyof T]?: DeepPartial<T[P]>;
} : T;
/**
 * Options that control the `bundle()` operation (combining all referenced
 * files/URLs into a single schema with only internal $ref pointers).
 */
export interface BundleOptions {
    /**
     * A function, called for each path, which can return true to stop this path and all
     * subpaths from being processed further. This is useful in schemas where some
     * subpaths contain literal $ref keys that should not be changed.
     */
    excludedPathMatcher?(path: string): boolean;
    /**
     * Callback invoked during bundling.
     *
     * @argument {string} path - The path being processed (ie. the `$ref` string)
     * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to
     * @argument {JSONSchemaObject} parent - The parent of the processed object
     * @argument {string} parentPropName - The prop name of the parent object whose value was processed
     */
    onBundle?(path: string, value: JSONSchemaObject, parent?: JSONSchemaObject, parentPropName?: string): void;
}
/**
 * Options that control the `dereference()` operation (replacing $ref pointers
 * with the values they point to).
 */
export interface DereferenceOptions {
    /**
     * Determines whether circular `$ref` pointers are handled.
     *
     * If set to `false`, then a `ReferenceError` will be thrown if the schema contains any circular references.
     *
     * If set to `"ignore"`, then circular references will simply be ignored. No error will be thrown, but the `$Refs.circular` property will still be set to `true`.
     */
    circular?: boolean | "ignore";
    /**
     * A function, called for each path, which can return true to stop this path and all
     * subpaths from being dereferenced further. This is useful in schemas where some
     * subpaths contain literal $ref keys that should not be dereferenced.
     */
    excludedPathMatcher?(path: string): boolean;
    /**
     * Callback invoked during circular reference detection.
     *
     * @argument {string} path - The path that is circular (ie. the `$ref` string)
     */
    onCircular?(path: string): void;
    /**
     * Callback invoked during dereferencing.
     *
     * @argument {string} path - The path being dereferenced (ie. the `$ref` string)
     * @argument {JSONSchemaObject} value - The JSON-Schema that the `$ref` resolved to
     * @argument {JSONSchemaObject} parent - The parent of the dereferenced object
     * @argument {string} parentPropName - The prop name of the parent object whose value was dereferenced
     */
    onDereference?(path: string, value: JSONSchemaObject, parent?: JSONSchemaObject, parentPropName?: string): void;
    /**
     * An array of properties to preserve when dereferencing a `$ref` schema. Useful if you want to
     * enforce non-standard dereferencing behavior like present in the OpenAPI 3.1 specification where
     * `description` and `summary` properties are preserved when alongside a `$ref` pointer.
     *
     * If none supplied then no properties will be preserved and the object will be fully replaced
     * with the dereferenced `$ref`.
     */
    preservedProperties?: string[];
    /**
     * Whether a reference should resolve relative to its directory/path, or from the cwd
     *
     * Default: `relative`
     *
     * NOTE(review): the defaults object in options.js sets a key named
     * `referenceResolution` rather than `externalReferenceResolution` — verify
     * which name the dereference implementation actually reads.
     */
    externalReferenceResolution?: "relative" | "root";
}
/**
 * Options that determine how JSON schemas are parsed, resolved, and dereferenced.
 *
 * This is the fully-populated options shape used internally; callers supply a
 * `ParserOptions` (deep-partial) override that is merged over the defaults.
 */
export interface $RefParserOptions<S extends object = JSONSchema> {
    /**
     * The `parse` options determine how different types of files will be parsed.
     *
     * JSON Schema `$Ref` Parser comes with built-in JSON, YAML, plain-text, and binary parsers, any of which you can configure or disable. You can also add your own custom parsers if you want.
     */
    parse: {
        json?: Plugin | boolean;
        yaml?: Plugin | boolean;
        binary?: Plugin | boolean;
        text?: Plugin | boolean;
        [key: string]: Plugin | boolean | undefined;
    };
    /**
     * The `resolve` options control how JSON Schema $Ref Parser will resolve file paths and URLs, and how those files will be read/downloaded.
     *
     * JSON Schema `$Ref` Parser comes with built-in support for HTTP and HTTPS, as well as support for local files (when running in Node.js). You can configure or disable either of these built-in resolvers. You can also add your own custom resolvers if you want.
     */
    resolve: {
        /**
         * Determines whether external $ref pointers will be resolved. If this option is disabled, then external `$ref` pointers will simply be ignored.
         */
        external?: boolean;
        file?: Partial<ResolverOptions<S>> | boolean;
        http?: HTTPResolverOptions<S> | boolean;
    } & {
        [key: string]: Partial<ResolverOptions<S>> | HTTPResolverOptions<S> | boolean | undefined;
    };
    /**
     * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
     * causes it to keep processing as much as possible and then throw a single error that contains all errors
     * that were encountered.
     */
    continueOnError: boolean;
    /**
     * The `bundle` options control how JSON Schema `$Ref` Parser will process `$ref` pointers within the JSON schema.
     */
    bundle: BundleOptions;
    /**
     * The `dereference` options control how JSON Schema `$Ref` Parser will dereference `$ref` pointers within the JSON schema.
     */
    dereference: DereferenceOptions;
    /**
     * Whether to clone the schema before dereferencing it.
     * This is useful when you want to dereference the same schema multiple times, but you don't want to modify the original schema.
     * Default: `true` due to mutating the input being the default behavior historically
     */
    mutateInputSchema?: boolean;
    /**
     * The maximum amount of time (in milliseconds) that JSON Schema $Ref Parser will spend dereferencing a single schema.
     * It will throw a timeout error if the operation takes longer than this.
     */
    timeoutMs?: number;
}
/** Returns a fresh copy of the default options on every call. */
export declare const getJsonSchemaRefParserDefaultOptions: () => $RefParserOptions<JSONSchema>;
/** Merges user-supplied option overrides over a fresh copy of the defaults. */
export declare const getNewOptions: <S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(options: O | undefined) => O & $RefParserOptions<S>;
/** Alias for the fully-populated options shape. */
export type Options<S extends object = JSONSchema> = $RefParserOptions<S>;
/** The deep-partial options shape accepted from callers. */
export type ParserOptions<S extends object = JSONSchema> = DeepPartial<$RefParserOptions<S>>;
export default $RefParserOptions;
+135
View File
@@ -0,0 +1,135 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getNewOptions = exports.getJsonSchemaRefParserDefaultOptions = void 0;
const json_js_1 = __importDefault(require("./parsers/json.js"));
const yaml_js_1 = __importDefault(require("./parsers/yaml.js"));
const text_js_1 = __importDefault(require("./parsers/text.js"));
const binary_js_1 = __importDefault(require("./parsers/binary.js"));
const file_js_1 = __importDefault(require("./resolvers/file.js"));
const http_js_1 = __importDefault(require("./resolvers/http.js"));
/**
 * Builds a fresh set of default parser options.
 *
 * A new object is returned on every call so callers can freely mutate the
 * result and merged user options never leak between parser runs.
 */
const getJsonSchemaRefParserDefaultOptions = () => {
    const defaults = {
        /**
         * Determines how different types of files will be parsed.
         *
         * You can add additional parsers of your own, replace an existing one with
         * your own implementation, or disable any parser by setting it to false.
         */
        parse: {
            // Shallow-copied so user overrides can't mutate the shared parser plugins.
            json: { ...json_js_1.default },
            yaml: { ...yaml_js_1.default },
            text: { ...text_js_1.default },
            binary: { ...binary_js_1.default },
        },
        /**
         * Determines how JSON References will be resolved.
         *
         * You can add additional resolvers of your own, replace an existing one with
         * your own implementation, or disable any resolver by setting it to false.
         */
        resolve: {
            file: { ...file_js_1.default },
            http: { ...http_js_1.default },
            /**
             * Determines whether external $ref pointers will be resolved.
             * If this option is disabled, then none of above resolvers will be called.
             * Instead, external $ref pointers will simply be ignored.
             *
             * @type {boolean}
             */
            external: true,
        },
        /**
         * By default, JSON Schema $Ref Parser throws the first error it encounters. Setting `continueOnError` to `true`
         * causes it to keep processing as much as possible and then throw a single error that contains all errors
         * that were encountered.
         */
        continueOnError: false,
        /**
         * Options that control bundling of JSON references.
         */
        bundle: {
            /**
             * A function, called for each path, which can return true to stop this path and all
             * subpaths from being processed further. This is useful in schemas where some
             * subpaths contain literal $ref keys that should not be changed.
             *
             * @type {function}
             */
            excludedPathMatcher: () => false,
        },
        /**
         * Options that control dereferencing of JSON references.
         */
        dereference: {
            /**
             * Dereference circular (recursive) JSON references?
             * If false, then a {@link ReferenceError} will be thrown if a circular reference is found.
             * If "ignore", then circular references will not be dereferenced.
             *
             * @type {boolean|string}
             */
            circular: true,
            /**
             * A function, called for each path, which can return true to stop this path and all
             * subpaths from being dereferenced further. This is useful in schemas where some
             * subpaths contain literal $ref keys that should not be dereferenced.
             *
             * @type {function}
             */
            excludedPathMatcher: () => false,
            // BUG FIX: the public `DereferenceOptions` type declares this setting as
            // `externalReferenceResolution` (documented default "relative"), but the
            // default here was keyed `referenceResolution`, so the documented default
            // was never applied under the declared name. Set the correctly-named key,
            // and keep the legacy key for backward compatibility in case anything
            // still reads it.
            externalReferenceResolution: "relative",
            referenceResolution: "relative",
        },
        // The input schema has historically been mutated in place, hence `true`.
        mutateInputSchema: true,
    };
    return defaults;
};
exports.getJsonSchemaRefParserDefaultOptions = getJsonSchemaRefParserDefaultOptions;
/**
 * Produces a fully-populated options object: a fresh copy of the defaults
 * with the caller's overrides (if any) merged on top.
 *
 * @param options - Optional partial options supplied by the caller
 * @returns the merged options object
 */
const getNewOptions = (options) => {
    const populated = (0, exports.getJsonSchemaRefParserDefaultOptions)();
    // `merge` mutates and returns its target, so both branches yield `populated`.
    return options ? merge(populated, options) : populated;
};
exports.getNewOptions = getNewOptions;
/**
 * Recursively copies the properties of the source object onto the target
 * object, mutating and returning the target.
 *
 * Nested plain objects are merged recursively; scalars, arrays, functions,
 * Dates, and RegExps overwrite the target value wholesale. `undefined`
 * source values are skipped. The keys `__proto__`, `constructor`, and
 * `prototype` are ignored to prevent prototype pollution.
 *
 * @param target - The object that we're populating
 * @param source - The options that are being merged
 * @returns the (mutated) target object
 */
function merge(target, source) {
    if (isMergeable(source)) {
        for (const key of Object.keys(source)) {
            // Prevent prototype pollution: never copy these keys.
            if (key === "__proto__" || key === "constructor" || key === "prototype") {
                continue;
            }
            const incoming = source[key];
            if (isMergeable(incoming)) {
                // Nested object: merge into the existing value (or a fresh object).
                target[key] = merge(target[key] || {}, incoming);
            }
            else if (incoming !== undefined) {
                // Scalar, array, function, Date, or RegExp: overwrite outright.
                target[key] = incoming;
            }
        }
    }
    return target;
}
/**
 * Determines whether the given value can be merged (a plain object),
 * or if it is a value that should just override the target wholesale
 * (scalar, array, RegExp, or Date).
 *
 * @param val - The value to inspect
 * @returns truthy when the value should be merged recursively
 */
function isMergeable(val) {
    return val && typeof val === "object" && !(val instanceof Date) && !(val instanceof RegExp) && !Array.isArray(val);
}
+8
View File
@@ -0,0 +1,8 @@
import type $Refs from "./refs.js";
import type { ParserOptions } from "./options.js";
import type { JSONSchema } from "./types/index.js";
/**
 * Reads and parses the specified file path or URL.
 *
 * @param path - The file path or URL to read
 * @param $refs - The $Refs collection the parsed result is associated with
 * @param options - Options selecting which parsers/resolvers to use
 * @returns the parsed schema, raw text, or a binary buffer — depending on which parser handled the file
 */
declare function parse<S extends object = JSONSchema, O extends ParserOptions<S> = ParserOptions<S>>(path: string, $refs: $Refs<S, O>, options: O): Promise<string | Buffer<ArrayBufferLike> | S | undefined>;
export default parse;

Some files were not shown because too many files have changed in this diff Show More