From 7c7d92bbb64391bf323c51aa7b22c811ad38d065 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Tue, 2 Dec 2025 15:37:30 -0600 Subject: [PATCH 01/51] feat(api): Uplift API reference docs with Scalar renderer Replace legacy API documentation approach with modern Scalar-based rendering: ## Architecture Changes - Add renderer abstraction (`layouts/partials/api/`) supporting Scalar and RapiDoc - Create `api` layout type for API reference pages (single.html, list.html) - Configure renderer via `site.Params.apiRenderer` (default: scalar) ## OpenAPI Processing Pipeline (TypeScript) - `api-docs/scripts/generate-openapi-articles.ts` - Main generation script - `api-docs/scripts/openapi-paths-to-hugo-data/` - OpenAPI to Hugo data converter - Generates per-endpoint path fragments for AI agent access - Creates Hugo content pages with `type: api` frontmatter ## AI Agent Accessibility - Full specs at `/openapi/influxdb-{product}.yml` and `.json` - Per-endpoint fragments at `/openapi/influxdb-{product}/paths/` - `` tags in HTML for machine discovery ## Scalar Features - Dark/light theme support synchronized with site theme - InfluxData brand colors - Responsive layout - Download link for OpenAPI spec ## Products Supported - cloud-v2, oss-v2 - influxdb3-core, influxdb3-enterprise - cloud-dedicated, cloud-serverless, clustered Usage: node api-docs/scripts/dist/generate-openapi-articles.js [product] --- .gitignore | 9 + api-docs/scripts/README.md | 390 ++++++++++++ .../scripts/dist/generate-openapi-articles.js | 343 ++++++++++ .../dist/openapi-paths-to-hugo-data/index.js | 275 ++++++++ api-docs/scripts/generate-openapi-articles.ts | 360 +++++++++++ .../openapi-paths-to-hugo-data/index.ts | 528 ++++++++++++++++ .../openapi-paths-to-hugo-data/package.json | 14 + .../openapi-paths-to-hugo-data/yarn.lock | 32 + api-docs/scripts/tsconfig.json | 25 + config/_default/hugo.yml | 2 + .../influxdb/cloud-v2/articles.json | 597 ++++++++++++++++++ layouts/_default/api.html | 34 +- 
layouts/api/list.html | 33 + layouts/api/single.html | 33 + layouts/partials/api/rapidoc.html | 179 ++++++ layouts/partials/api/renderer.html | 25 + layouts/partials/api/scalar.html | 186 ++++++ 17 files changed, 3064 insertions(+), 1 deletion(-) create mode 100644 api-docs/scripts/README.md create mode 100644 api-docs/scripts/dist/generate-openapi-articles.js create mode 100644 api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js create mode 100644 api-docs/scripts/generate-openapi-articles.ts create mode 100644 api-docs/scripts/openapi-paths-to-hugo-data/index.ts create mode 100644 api-docs/scripts/openapi-paths-to-hugo-data/package.json create mode 100644 api-docs/scripts/openapi-paths-to-hugo-data/yarn.lock create mode 100644 api-docs/scripts/tsconfig.json create mode 100644 data/article-data/influxdb/cloud-v2/articles.json create mode 100644 layouts/api/list.html create mode 100644 layouts/api/single.html create mode 100644 layouts/partials/api/rapidoc.html create mode 100644 layouts/partials/api/renderer.html create mode 100644 layouts/partials/api/scalar.html diff --git a/.gitignore b/.gitignore index 5b5487a3d5..caeee60bd5 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,13 @@ package-lock.json /content/influxdb*/**/api/**/*.html !api-docs/**/.config.yml /api-docs/redoc-static.html* + +# API documentation generation (generated by api-docs/scripts/) +/content/influxdb/*/api/** +/content/influxdb3/*/api/** +/content/influxdb3/*/reference/api/** +/static/openapi + /helper-scripts/output/* /telegraf-build !telegraf-build/templates @@ -38,6 +45,8 @@ tmp # TypeScript build output **/dist/ +# Exception: include compiled API doc scripts for easier use +!api-docs/scripts/dist/ **/dist-lambda/ # User context files for AI assistant tools diff --git a/api-docs/scripts/README.md b/api-docs/scripts/README.md new file mode 100644 index 0000000000..2ea5f440d1 --- /dev/null +++ b/api-docs/scripts/README.md @@ -0,0 +1,390 @@ +# API Documentation Generation Scripts 
+ +TypeScript-based scripts for generating Hugo data files and content pages from OpenAPI specifications. + +## Overview + +These scripts convert OpenAPI v3 specifications into Hugo-compatible data files and content pages for all InfluxDB products. + +### What Gets Generated + +For each product, the scripts generate: + +1. **OpenAPI Spec Copies** (static directory): + - `influxdb-{product}.yml` - YAML version of the spec + - `influxdb-{product}.json` - JSON version of the spec + +2. **Path Group Fragments** (static/openapi/{product}/paths/): + - Separate YAML and JSON files for each API path group + - Example: `ref-api-v2-buckets.yaml` and `ref-api-v2-buckets.json` + +3. **Article Metadata** (data/article-data/influxdb/{product}/): + - `articles.yml` - Hugo data file with article metadata + - `articles.json` - JSON version for programmatic access + +4. **Hugo Content Pages** (content/{product}/api/): + - Markdown files generated from article data + - One page per API path group + +## Quick Start + +### Build Scripts + +Compile TypeScript to JavaScript (required before running): + +```bash +yarn build:api-scripts +``` + +### Generate API Pages + +**Generate all products:** + +```bash +yarn build:api-pages +``` + +**Generate specific product(s):** + +```bash +yarn build:api-pages:product cloud-v2 +yarn build:api-pages:product cloud-v2 oss-v2 +``` + +## Supported Products + +| Product ID | Description | Spec File | Content Path | +| ---------------------- | ------------------------- | ------------------------------------------------ | -------------------------------------------- | +| `cloud-v2` | InfluxDB Cloud (v2 API) | `api-docs/cloud/v2/ref.yml` | `content/influxdb/cloud/api/v2` | +| `oss-v2` | InfluxDB OSS v2 | `api-docs/v2/ref.yml` | `content/influxdb/v2/api/v2` | +| `influxdb3-core` | InfluxDB 3 Core | `api-docs/influxdb3/core/v3/ref.yml` | `content/influxdb3/core/reference/api` | +| `influxdb3-enterprise` | InfluxDB 3 Enterprise | 
`api-docs/influxdb3/enterprise/v3/ref.yml` | `content/influxdb3/enterprise/reference/api` | +| `cloud-dedicated` | InfluxDB Cloud Dedicated | `api-docs/influxdb3/cloud-dedicated/v2/ref.yml` | `content/influxdb/cloud-dedicated/api` | +| `cloud-serverless` | InfluxDB Cloud Serverless | `api-docs/influxdb3/cloud-serverless/v2/ref.yml` | `content/influxdb/cloud-serverless/api` | +| `clustered` | InfluxDB Clustered | `api-docs/influxdb3/clustered/v2/ref.yml` | `content/influxdb/clustered/api` | + +## Architecture + +### TypeScript Files + +``` +api-docs/scripts/ +├── tsconfig.json # TypeScript configuration +├── generate-openapi-articles.ts # Main orchestration script +└── openapi-paths-to-hugo-data/ + ├── index.ts # Core conversion logic + └── package.json # Module dependencies +``` + +### Compiled JavaScript + +After running `yarn build:api-scripts`, compiled files are in: + +``` +api-docs/scripts/dist/ +├── generate-openapi-articles.js +├── generate-openapi-articles.d.ts +└── openapi-paths-to-hugo-data/ + ├── index.js + └── index.d.ts +``` + +## Script Details + +### generate-openapi-articles.ts + +Main orchestration script that processes products. + +**For each product, it:** + +1. Runs `getswagger.sh` to fetch/bundle the OpenAPI spec +2. Copies spec to `static/openapi/influxdb-{product}.yml` +3. Generates JSON version at `static/openapi/influxdb-{product}.json` +4. Generates path group fragments (YAML and JSON) +5. Creates article metadata (YAML and JSON) +6. Generates Hugo content pages + +**Usage:** + +```bash +node api-docs/scripts/dist/generate-openapi-articles.js [product-ids...] + +# Examples: +node api-docs/scripts/dist/generate-openapi-articles.js # All products +node api-docs/scripts/dist/generate-openapi-articles.js cloud-v2 # Single product +node api-docs/scripts/dist/generate-openapi-articles.js cloud-v2 oss-v2 # Multiple products +``` + +**Output:** + +``` +📋 Processing all products... 
+ +================================================================================ +Processing InfluxDB Cloud (v2 API) +================================================================================ + +Fetching OpenAPI spec for cloud-v2... +✓ Copied spec to static/openapi/influxdb-cloud-v2.yml +✓ Generated JSON spec at static/openapi/influxdb-cloud-v2.json + +Generating OpenAPI path files in static/openapi/influxdb-cloud-v2/paths.... +Generated: ref-api-v2-buckets.yaml and ref-api-v2-buckets.json +... + +Generating OpenAPI article data in data/article-data/influxdb/cloud-v2... +Generated 32 articles in data/article-data/influxdb/cloud-v2 + +✅ Successfully processed InfluxDB Cloud (v2 API) +``` + +### openapi-paths-to-hugo-data/index.ts + +Core conversion library that processes OpenAPI specs. + +**Key Functions:** + +- `generateHugoData(options)` - Main entry point +- `writePathOpenapis()` - Groups paths and writes fragments +- `createArticleDataForPathGroup()` - Generates article metadata + +**Path Grouping Logic:** + +Paths are grouped by their base path (first 3-4 segments, excluding placeholders): + +``` +/api/v2/buckets → api-v2-buckets +/api/v2/buckets/{id} → api-v2-buckets (same group) +/api/v2/authorizations → api-v2-authorizations +``` + +**Output Formats:** + +- **YAML**: Hugo-compatible data files +- **JSON**: Programmatic access and tooling + +## Development + +### Prerequisites + +- Node.js >= 16.0.0 +- Yarn package manager +- TypeScript installed (via root package.json) + +### Setup + +```bash +# Install dependencies (from repo root) +yarn install + +# Or install in the openapi-paths-to-hugo-data module +cd api-docs/scripts/openapi-paths-to-hugo-data +yarn install +``` + +### TypeScript Configuration + +The scripts use a dedicated `tsconfig.json` with CommonJS output: + +```json +{ + "compilerOptions": { + "target": "ES2021", + "module": "CommonJS", + "outDir": "./dist", + "strict": true, + ... + } +} +``` + +### Making Changes + +1. 
Edit TypeScript files in `api-docs/scripts/` +2. Compile: `yarn build:api-scripts` +3. Test: `yarn build:api-pages:product cloud-v2` + +### Watch Mode + +For active development: + +```bash +cd api-docs/scripts/openapi-paths-to-hugo-data +yarn build:watch +``` + +## Testing + +### Unit Test Example + +```javascript +const converter = require('./api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js'); + +converter.generateHugoData({ + specFile: 'api-docs/influxdb/cloud/v2/ref.yml', + dataOutPath: './test-output/paths', + articleOutPath: './test-output/articles' +}); +``` + +### Verify Output + +After generation, check: + +1. **Path fragments exist:** + ```bash + ls -l static/openapi/influxdb-cloud-v2/paths/ + ``` + +2. **Both formats generated:** + ```bash + ls -l static/openapi/influxdb-cloud-v2/paths/*.{yaml,json} + ``` + +3. **Article data created:** + ```bash + cat data/article-data/influxdb/cloud-v2/articles.yml + cat data/article-data/influxdb/cloud-v2/articles.json + ``` + +4. **Hugo pages generated:** + ```bash + ls -l content/influxdb/cloud/api/v2/ + ``` + +## Troubleshooting + +### TypeScript Compilation Errors + +```bash +# Clean and rebuild +rm -rf api-docs/scripts/dist +yarn build:api-scripts +``` + +### Missing Type Definitions + +```bash +cd api-docs/scripts/openapi-paths-to-hugo-data +yarn add --dev @types/js-yaml @types/node +``` + +### Spec File Not Found + +Make sure to run `getswagger.sh` first: + +```bash +cd api-docs +./getswagger.sh cloud-v2 -B +``` + +### Path Grouping Issues + +The script groups paths by their first 3-4 segments. If you need different grouping: + +1. Edit `writePathOpenapis()` in `openapi-paths-to-hugo-data/index.ts` +2. Modify the `key.slice(0, 4)` logic +3. 
Rebuild: `yarn build:api-scripts` + +## Migration from JavaScript + +The original JavaScript files are preserved for reference: + +- `api-docs/scripts/generate-openapi-articles.js` (original) +- `api-docs/scripts/openapi-paths-to-hugo-data/index.js` (original) + +### Key Improvements + +1. **TypeScript**: Full type safety and IDE support +2. **Dual Formats**: Generates both YAML and JSON +3. **All Products**: Includes all 7 InfluxDB products +4. **Better Errors**: Clear error messages with product validation +5. **CLI Arguments**: Support for processing specific products +6. **Comprehensive Logging**: Progress indicators and status messages + +## Related Documentation + +- **API Docs README**: `api-docs/README.md` - Complete API documentation workflow +- **OpenAPI Plugins**: `api-docs/openapi/plugins/` - Custom processing plugins +- **Hugo Data to Pages**: `hugo-data-to-pages/` - Page generation from data files + +## Examples + +### Generate Only Cloud Products + +```bash +yarn build:api-pages:product cloud-v2 cloud-dedicated cloud-serverless +``` + +### Generate Only InfluxDB 3 Products + +```bash +yarn build:api-pages:product influxdb3-core influxdb3-enterprise +``` + +### Process Single Product Manually + +```bash +# Compile first +yarn build:api-scripts + +# Run for specific product +node api-docs/scripts/dist/generate-openapi-articles.js oss-v2 +``` + +## API Reference + +### generateHugoData(options) + +Generate Hugo data files from an OpenAPI specification. 
+ +**Parameters:** + +- `options.specFile` (string) - Path to the OpenAPI spec file +- `options.dataOutPath` (string) - Output path for OpenAPI path fragments +- `options.articleOutPath` (string) - Output path for article metadata + +**Example:** + +```javascript +const { generateHugoData } = require('./api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js'); + +generateHugoData({ + specFile: 'api-docs/influxdb/cloud/v2/ref.yml', + dataOutPath: 'static/openapi/influxdb-cloud-v2/paths', + articleOutPath: 'data/article-data/influxdb/cloud-v2' +}); +``` + +### productConfigs + +Map of product configurations exported from `generate-openapi-articles.ts`. + +**Type:** + +```typescript +type ProductConfig = { + specFile: string; // Path to OpenAPI spec + pagesDir: string; // Hugo content directory + description?: string; // Product description +}; + +const productConfigs: Record; +``` + +**Usage:** + +```javascript +const { productConfigs } = require('./api-docs/scripts/dist/generate-openapi-articles.js'); + +console.log(productConfigs['cloud-v2']); +// { +// specFile: 'api-docs/cloud/v2/ref.yml', +// pagesDir: 'content/influxdb/cloud/api/v2', +// description: 'InfluxDB Cloud (v2 API)' +// } +``` + +## License + +Same as parent docs-v2 repository (MIT). diff --git a/api-docs/scripts/dist/generate-openapi-articles.js b/api-docs/scripts/dist/generate-openapi-articles.js new file mode 100644 index 0000000000..92dcc39115 --- /dev/null +++ b/api-docs/scripts/dist/generate-openapi-articles.js @@ -0,0 +1,343 @@ +#!/usr/bin/env node +'use strict'; +/** + * Generate OpenAPI Articles Script + * + * Generates Hugo data files and content pages from OpenAPI specifications + * for all InfluxDB products. + * + * This script: + * 1. Runs getswagger.sh to fetch/bundle OpenAPI specs + * 2. Copies specs to static directory for download + * 3. Generates path group fragments (YAML and JSON) + * 4. Creates article metadata (YAML and JSON) + * 5. 
Generates Hugo content pages from article data + * + * Usage: + * node generate-openapi-articles.js # Generate all products + * node generate-openapi-articles.js cloud-v2 # Generate single product + * node generate-openapi-articles.js cloud-v2 oss-v2 # Generate multiple products + * + * @module generate-openapi-articles + */ +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ('get' in desc ? !m.__esModule : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? function (o, v) { + Object.defineProperty(o, 'default', { enumerable: true, value: v }); + } + : function (o, v) { + o['default'] = v; + }); +var __importStar = + (this && this.__importStar) || + function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) + for (var k in mod) + if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) + __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; +Object.defineProperty(exports, '__esModule', { value: true }); +exports.productConfigs = void 0; +exports.processProduct = processProduct; +exports.generateDataFromOpenAPI = generateDataFromOpenAPI; +exports.generatePagesFromArticleData = generatePagesFromArticleData; +const child_process_1 = require('child_process'); +const path = __importStar(require('path')); +const fs = __importStar(require('fs')); +// Import the OpenAPI to Hugo converter +const openapiPathsToHugo = require('./openapi-paths-to-hugo-data/index.js'); +// Calculate the relative paths +const DOCS_ROOT = '.'; +const API_DOCS_ROOT = 'api-docs'; 
+/** + * Execute a shell command and handle errors + * + * @param command - Command to execute + * @param description - Human-readable description of the command + * @throws Exits process with code 1 on error + */ +function execCommand(command, description) { + try { + if (description) { + console.log(`\n${description}...`); + } + console.log(`Executing: ${command}\n`); + (0, child_process_1.execSync)(command, { stdio: 'inherit' }); + } catch (error) { + console.error(`\n❌ Error executing command: ${command}`); + if (error instanceof Error) { + console.error(error.message); + } + process.exit(1); + } +} +/** + * Generate Hugo data files from OpenAPI specification + * + * @param specFile - Path to the OpenAPI spec file + * @param dataOutPath - Output path for OpenAPI path fragments + * @param articleOutPath - Output path for article metadata + */ +function generateDataFromOpenAPI(specFile, dataOutPath, articleOutPath) { + if (!fs.existsSync(dataOutPath)) { + fs.mkdirSync(dataOutPath, { recursive: true }); + } + openapiPathsToHugo.generateHugoData({ + dataOutPath, + articleOutPath, + specFile, + }); +} +/** + * Generate Hugo content pages from article data + * + * Creates markdown files with frontmatter from article metadata. + * Each article becomes a page with type: api that renders via Scalar. 
+ * + * @param articlesPath - Path to the articles data directory + * @param contentPath - Output path for generated content pages + */ +function generatePagesFromArticleData(articlesPath, contentPath) { + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; + } + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent); + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + // Generate a page for each article + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + // Generate frontmatter + const frontmatter = { + title: article.fields.name || article.path, + description: `API reference for ${article.fields.name || article.path}`, + type: 'api', + staticFilePath: article.fields.staticFilePath, + weight: 100, + }; + const pageContent = `--- +${yaml.dump(frontmatter)}--- +`; + fs.writeFileSync(pageFile, pageContent); + } + console.log( + `✓ Generated ${data.articles.length} content pages in ${contentPath}` + ); +} +/** + * Product configurations for all InfluxDB editions + * + * Maps product identifiers to their OpenAPI specs and content directories + */ +const productConfigs = { + 'cloud-v2': { + specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api/v2'), + description: 'InfluxDB Cloud (v2 API)', + }, + 'oss-v2': { + specFile: path.join(API_DOCS_ROOT, 
'influxdb/v2/v2/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api/v2'), + description: 'InfluxDB OSS v2', + }, + 'influxdb3-core': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core/reference/api'), + description: 'InfluxDB 3 Core', + }, + 'influxdb3-enterprise': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), + pagesDir: path.join( + DOCS_ROOT, + 'content/influxdb3/enterprise/reference/api' + ), + description: 'InfluxDB 3 Enterprise', + }, + 'cloud-dedicated': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-dedicated/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), + description: 'InfluxDB Cloud Dedicated', + }, + 'cloud-serverless': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-serverless/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), + description: 'InfluxDB Cloud Serverless', + }, + clustered: { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/clustered/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), + description: 'InfluxDB Clustered', + }, +}; +exports.productConfigs = productConfigs; +/** + * Process a single product: fetch spec, generate data, and create pages + * + * @param productKey - Product identifier (e.g., 'cloud-v2') + * @param config - Product configuration + */ +function processProduct(productKey, config) { + console.log('\n' + '='.repeat(80)); + console.log(`Processing ${config.description || productKey}`); + console.log('='.repeat(80)); + const staticPath = path.join(DOCS_ROOT, 'static/openapi'); + const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); + const staticJsonSpecPath = path.join( + staticPath, + `influxdb-${productKey}.json` + ); + const staticPathsPath = path.join(staticPath, 
`influxdb-${productKey}/paths`); + const articlesPath = path.join( + DOCS_ROOT, + `data/article-data/influxdb/${productKey}` + ); + // Check if spec file exists + if (!fs.existsSync(config.specFile)) { + console.warn(`⚠️ Spec file not found: ${config.specFile}`); + console.log('Skipping this product. Run getswagger.sh first if needed.\n'); + return; + } + try { + // Step 1: Execute the getswagger.sh script to fetch/bundle the spec + const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); + if (fs.existsSync(getswaggerScript)) { + execCommand( + `${getswaggerScript} ${productKey} -B`, + `Fetching OpenAPI spec for ${productKey}` + ); + } else { + console.log(`⚠️ getswagger.sh not found, skipping fetch step`); + } + // Step 2: Ensure static directory exists + if (!fs.existsSync(staticPath)) { + fs.mkdirSync(staticPath, { recursive: true }); + } + // Step 3: Copy the generated OpenAPI spec to static folder (YAML) + if (fs.existsSync(config.specFile)) { + fs.copyFileSync(config.specFile, staticSpecPath); + console.log(`✓ Copied spec to ${staticSpecPath}`); + // Step 4: Generate JSON version of the spec + try { + const yaml = require('js-yaml'); + const specContent = fs.readFileSync(config.specFile, 'utf8'); + const specObject = yaml.load(specContent); + fs.writeFileSync( + staticJsonSpecPath, + JSON.stringify(specObject, null, 2) + ); + console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); + } catch (jsonError) { + console.warn(`⚠️ Could not generate JSON spec: ${jsonError}`); + } + } + // Step 5: Generate Hugo data from OpenAPI spec (path fragments for AI agents) + generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); + // Step 6: Generate Hugo content pages from article data + generatePagesFromArticleData(articlesPath, config.pagesDir); + console.log( + `\n✅ Successfully processed ${config.description || productKey}\n` + ); + } catch (error) { + console.error(`\n❌ Error processing ${productKey}:`, error); + process.exit(1); + } 
+} +/** + * Main execution function + */ +function main() { + const args = process.argv.slice(2); + // Determine which products to process + let productsToProcess; + if (args.length === 0) { + // No arguments: process all products + productsToProcess = Object.keys(productConfigs); + console.log('\n📋 Processing all products...\n'); + } else { + // Arguments provided: process only specified products + productsToProcess = args; + console.log( + `\n📋 Processing specified products: ${productsToProcess.join(', ')}\n` + ); + } + // Validate product keys + const invalidProducts = productsToProcess.filter( + (key) => !productConfigs[key] + ); + if (invalidProducts.length > 0) { + console.error( + `\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}` + ); + console.error('\nValid products:'); + Object.keys(productConfigs).forEach((key) => { + console.error(` - ${key}: ${productConfigs[key].description}`); + }); + process.exit(1); + } + // Process each product + productsToProcess.forEach((productKey) => { + const config = productConfigs[productKey]; + processProduct(productKey, config); + }); + console.log('\n' + '='.repeat(80)); + console.log('✅ All products processed successfully!'); + console.log('='.repeat(80) + '\n'); +} +// Execute if run directly +if (require.main === module) { + main(); +} +//# sourceMappingURL=generate-openapi-articles.js.map diff --git a/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js new file mode 100644 index 0000000000..363643bc6f --- /dev/null +++ b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js @@ -0,0 +1,275 @@ +'use strict'; +/** + * OpenAPI to Hugo Data Converter + * + * Converts OpenAPI v3 specifications into Hugo-compatible data files. + * Generates both YAML and JSON versions of spec fragments grouped by path. + * + * @module openapi-paths-to-hugo-data + */ +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? 
function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ('get' in desc ? !m.__esModule : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? function (o, v) { + Object.defineProperty(o, 'default', { enumerable: true, value: v }); + } + : function (o, v) { + o['default'] = v; + }); +var __importStar = + (this && this.__importStar) || + function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) + for (var k in mod) + if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) + __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; +Object.defineProperty(exports, '__esModule', { value: true }); +exports.generateHugoData = generateHugoData; +const yaml = __importStar(require('js-yaml')); +const fs = __importStar(require('fs')); +const path = __importStar(require('path')); +/** + * Read a YAML file and parse it + * + * @param filepath - Path to the YAML file + * @param encoding - File encoding (default: 'utf8') + * @returns Parsed YAML content + */ +function readFile(filepath, encoding = 'utf8') { + const content = fs.readFileSync(filepath, encoding); + return yaml.load(content); +} +/** + * Write data to a YAML file + * + * @param data - Data to write + * @param outputTo - Output file path + */ +function writeDataFile(data, outputTo) { + fs.writeFileSync(outputTo, yaml.dump(data)); +} +/** + * Write data to a JSON file + * + * @param data - Data to write + * @param outputTo - Output file path + */ +function writeJsonFile(data, outputTo) { + fs.writeFileSync(outputTo, JSON.stringify(data, null, 2)); +} +/** + * OpenAPI utility functions + */ 
+const openapiUtils = { + /** + * Check if a path fragment is a placeholder (e.g., {id}) + * + * @param str - Path fragment to check + * @returns True if the fragment is a placeholder + */ + isPlaceholderFragment(str) { + const placeholderRegex = /^\{.*\}$/; + return placeholderRegex.test(str); + }, +}; +/** + * Write OpenAPI specs grouped by path to separate files + * Generates both YAML and JSON versions + * + * @param openapi - OpenAPI document + * @param prefix - Filename prefix for output files + * @param outPath - Output directory path + */ +function writePathOpenapis(openapi, prefix, outPath) { + const pathGroups = {}; + // Group paths by their base path (first 3-4 segments, excluding placeholders) + Object.keys(openapi.paths) + .sort() + .forEach((p) => { + const delimiter = '/'; + let key = p.split(delimiter); + // Check if this is an item path (ends with a placeholder) + let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + // Take first 4 segments + key = key.slice(0, 4); + // Check if the last segment is still a placeholder + isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + const groupKey = key.join('/'); + pathGroups[groupKey] = pathGroups[groupKey] || {}; + pathGroups[groupKey][p] = openapi.paths[p]; + }); + // Write each path group to separate YAML and JSON files + Object.keys(pathGroups).forEach((pg) => { + // Deep copy openapi + const doc = JSON.parse(JSON.stringify(openapi)); + doc.paths = pathGroups[pg]; + doc.info.title = `${pg}\n${doc.info.title}`; + doc['x-pathGroup'] = pg; + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + // Write both YAML and JSON 
versions + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); + } catch (err) { + console.error(`Error writing path group ${pg}:`, err); + } + }); +} +/** + * Create article metadata for a path group + * + * @param openapi - OpenAPI document with x-pathGroup + * @returns Article metadata object + */ +function createArticleDataForPathGroup(openapi) { + const article = { + path: '', + fields: { + name: openapi['x-pathGroup'] || '', + describes: Object.keys(openapi.paths), + }, + }; + /** + * Convert path to snake case for article path + * + * @param p - Path to convert + * @returns Snake-cased path + */ + const snakifyPath = (p) => { + if (!p) { + return ''; + } + return p.replace(/^\//, '').replaceAll('/', '-'); + }; + article.path = snakifyPath(openapi['x-pathGroup'] || ''); + article.fields.title = openapi.info?.title; + article.fields.description = openapi.description; + const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); + article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] + .filter(Boolean) + .map((t) => snakifyPath(t)); + return article; +} +/** + * Write OpenAPI article metadata to Hugo data files + * Generates articles.yml and articles.json + * + * @param sourcePath - Path to directory containing OpenAPI fragment files + * @param targetPath - Output path for article data + * @param opts - Options including file pattern filter + */ +function writeOpenapiArticleData(sourcePath, targetPath, opts) { + /** + * Check if path is a file + */ + const isFile = (filePath) => { + return fs.lstatSync(filePath).isFile(); + }; + /** + * Check if filename matches pattern + */ + const matchesPattern = (filePath) => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter( + (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') + ) // Only process YAML files + .map((filePath) => { + const openapi = readFile(filePath); + const article = createArticleDataForPathGroup(openapi); + article.fields.source = filePath; + // Hugo omits "/static" from the URI when serving files stored in "./static" + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); + } + const articleCollection = { articles }; + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + console.log(`Generated ${articles.length} articles in ${targetPath}`); + } catch (e) { + console.error('Error writing article data:', e); + } +} +/** + * Generate Hugo data files from an OpenAPI specification + * + * This function: + * 1. Reads the OpenAPI spec file + * 2. Groups paths by their base path + * 3. Writes each group to separate YAML and JSON files + * 4. 
Generates article metadata for Hugo + * + * @param options - Generation options + */ +function generateHugoData(options) { + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = readFile(options.specFile, 'utf8'); + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + console.log( + `\nGenerating OpenAPI article data in ${options.articleOutPath}...` + ); + writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, { + filePattern: filenamePrefix, + }); + console.log('\nGeneration complete!\n'); +} +// CommonJS export for backward compatibility +module.exports = { + generateHugoData, +}; +//# sourceMappingURL=index.js.map diff --git a/api-docs/scripts/generate-openapi-articles.ts b/api-docs/scripts/generate-openapi-articles.ts new file mode 100644 index 0000000000..c48c9e9a38 --- /dev/null +++ b/api-docs/scripts/generate-openapi-articles.ts @@ -0,0 +1,360 @@ +#!/usr/bin/env node +/** + * Generate OpenAPI Articles Script + * + * Generates Hugo data files and content pages from OpenAPI specifications + * for all InfluxDB products. + * + * This script: + * 1. Runs getswagger.sh to fetch/bundle OpenAPI specs + * 2. Copies specs to static directory for download + * 3. Generates path group fragments (YAML and JSON) + * 4. Creates article metadata (YAML and JSON) + * 5. 
Generates Hugo content pages from article data + * + * Usage: + * node generate-openapi-articles.js # Generate all products + * node generate-openapi-articles.js cloud-v2 # Generate single product + * node generate-openapi-articles.js cloud-v2 oss-v2 # Generate multiple products + * + * @module generate-openapi-articles + */ + +import { execSync } from 'child_process'; +import * as path from 'path'; +import * as fs from 'fs'; + +// Import the OpenAPI to Hugo converter +const openapiPathsToHugo = require('./openapi-paths-to-hugo-data/index.js'); + +/** + * Product configuration for API generation + */ +interface ProductConfig { + /** Path to the OpenAPI spec file */ + specFile: string; + /** Path to the Hugo content directory for generated pages */ + pagesDir: string; + /** Optional description of the product */ + description?: string; +} + +/** + * Map of product identifiers to their configuration + */ +type ProductConfigMap = Record; + +// Calculate the relative paths +const DOCS_ROOT = '.'; +const API_DOCS_ROOT = 'api-docs'; + +/** + * Execute a shell command and handle errors + * + * @param command - Command to execute + * @param description - Human-readable description of the command + * @throws Exits process with code 1 on error + */ +function execCommand(command: string, description?: string): void { + try { + if (description) { + console.log(`\n${description}...`); + } + console.log(`Executing: ${command}\n`); + execSync(command, { stdio: 'inherit' }); + } catch (error) { + console.error(`\n❌ Error executing command: ${command}`); + if (error instanceof Error) { + console.error(error.message); + } + process.exit(1); + } +} + +/** + * Generate Hugo data files from OpenAPI specification + * + * @param specFile - Path to the OpenAPI spec file + * @param dataOutPath - Output path for OpenAPI path fragments + * @param articleOutPath - Output path for article metadata + */ +function generateDataFromOpenAPI( + specFile: string, + dataOutPath: string, + 
articleOutPath: string +): void { + if (!fs.existsSync(dataOutPath)) { + fs.mkdirSync(dataOutPath, { recursive: true }); + } + + openapiPathsToHugo.generateHugoData({ + dataOutPath, + articleOutPath, + specFile, + }); +} + +/** + * Generate Hugo content pages from article data + * + * Creates markdown files with frontmatter from article metadata. + * Each article becomes a page with type: api that renders via Scalar. + * + * @param articlesPath - Path to the articles data directory + * @param contentPath - Output path for generated content pages + */ +function generatePagesFromArticleData( + articlesPath: string, + contentPath: string +): void { + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; + } + + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent) as { + articles: Array<{ path: string; fields: Record }>; + }; + + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + + // Generate a page for each article + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + + // Generate frontmatter + const frontmatter = { + title: article.fields.name || article.path, + description: `API reference for ${article.fields.name || article.path}`, + type: 'api', + staticFilePath: article.fields.staticFilePath, + weight: 100, + }; + + const pageContent = `--- +${yaml.dump(frontmatter)}--- +`; + + fs.writeFileSync(pageFile, 
pageContent); + } + + console.log( + `✓ Generated ${data.articles.length} content pages in ${contentPath}` + ); +} + +/** + * Product configurations for all InfluxDB editions + * + * Maps product identifiers to their OpenAPI specs and content directories + */ +const productConfigs: ProductConfigMap = { + 'cloud-v2': { + specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api/v2'), + description: 'InfluxDB Cloud (v2 API)', + }, + 'oss-v2': { + specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api/v2'), + description: 'InfluxDB OSS v2', + }, + 'influxdb3-core': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core/reference/api'), + description: 'InfluxDB 3 Core', + }, + 'influxdb3-enterprise': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), + pagesDir: path.join( + DOCS_ROOT, + 'content/influxdb3/enterprise/reference/api' + ), + description: 'InfluxDB 3 Enterprise', + }, + 'cloud-dedicated': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-dedicated/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), + description: 'InfluxDB Cloud Dedicated', + }, + 'cloud-serverless': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-serverless/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), + description: 'InfluxDB Cloud Serverless', + }, + clustered: { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/clustered/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), + description: 'InfluxDB Clustered', + }, +}; + +/** + * Process a single product: fetch spec, generate data, and create pages + * + * @param productKey - Product identifier (e.g., 'cloud-v2') + * @param config - 
Product configuration + */ +function processProduct(productKey: string, config: ProductConfig): void { + console.log('\n' + '='.repeat(80)); + console.log(`Processing ${config.description || productKey}`); + console.log('='.repeat(80)); + + const staticPath = path.join(DOCS_ROOT, 'static/openapi'); + const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); + const staticJsonSpecPath = path.join( + staticPath, + `influxdb-${productKey}.json` + ); + const staticPathsPath = path.join(staticPath, `influxdb-${productKey}/paths`); + const articlesPath = path.join( + DOCS_ROOT, + `data/article-data/influxdb/${productKey}` + ); + + // Check if spec file exists + if (!fs.existsSync(config.specFile)) { + console.warn(`⚠️ Spec file not found: ${config.specFile}`); + console.log('Skipping this product. Run getswagger.sh first if needed.\n'); + return; + } + + try { + // Step 1: Execute the getswagger.sh script to fetch/bundle the spec + const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); + if (fs.existsSync(getswaggerScript)) { + execCommand( + `${getswaggerScript} ${productKey} -B`, + `Fetching OpenAPI spec for ${productKey}` + ); + } else { + console.log(`⚠️ getswagger.sh not found, skipping fetch step`); + } + + // Step 2: Ensure static directory exists + if (!fs.existsSync(staticPath)) { + fs.mkdirSync(staticPath, { recursive: true }); + } + + // Step 3: Copy the generated OpenAPI spec to static folder (YAML) + if (fs.existsSync(config.specFile)) { + fs.copyFileSync(config.specFile, staticSpecPath); + console.log(`✓ Copied spec to ${staticSpecPath}`); + + // Step 4: Generate JSON version of the spec + try { + const yaml = require('js-yaml'); + const specContent = fs.readFileSync(config.specFile, 'utf8'); + const specObject = yaml.load(specContent); + fs.writeFileSync( + staticJsonSpecPath, + JSON.stringify(specObject, null, 2) + ); + console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); + } catch (jsonError) { + console.warn(`⚠️ 
Could not generate JSON spec: ${jsonError}`); + } + } + + // Step 5: Generate Hugo data from OpenAPI spec (path fragments for AI agents) + generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); + + // Step 6: Generate Hugo content pages from article data + generatePagesFromArticleData(articlesPath, config.pagesDir); + + console.log( + `\n✅ Successfully processed ${config.description || productKey}\n` + ); + } catch (error) { + console.error(`\n❌ Error processing ${productKey}:`, error); + process.exit(1); + } +} + +/** + * Main execution function + */ +function main(): void { + const args = process.argv.slice(2); + + // Determine which products to process + let productsToProcess: string[]; + + if (args.length === 0) { + // No arguments: process all products + productsToProcess = Object.keys(productConfigs); + console.log('\n📋 Processing all products...\n'); + } else { + // Arguments provided: process only specified products + productsToProcess = args; + console.log( + `\n📋 Processing specified products: ${productsToProcess.join(', ')}\n` + ); + } + + // Validate product keys + const invalidProducts = productsToProcess.filter( + (key) => !productConfigs[key] + ); + if (invalidProducts.length > 0) { + console.error( + `\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}` + ); + console.error('\nValid products:'); + Object.keys(productConfigs).forEach((key) => { + console.error(` - ${key}: ${productConfigs[key].description}`); + }); + process.exit(1); + } + + // Process each product + productsToProcess.forEach((productKey) => { + const config = productConfigs[productKey]; + processProduct(productKey, config); + }); + + console.log('\n' + '='.repeat(80)); + console.log('✅ All products processed successfully!'); + console.log('='.repeat(80) + '\n'); +} + +// Execute if run directly +if (require.main === module) { + main(); +} + +// Export for use as a module +export { + productConfigs, + processProduct, + generateDataFromOpenAPI, + 
generatePagesFromArticleData, +}; diff --git a/api-docs/scripts/openapi-paths-to-hugo-data/index.ts b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts new file mode 100644 index 0000000000..c634101bef --- /dev/null +++ b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts @@ -0,0 +1,528 @@ +/** + * OpenAPI to Hugo Data Converter + * + * Converts OpenAPI v3 specifications into Hugo-compatible data files. + * Generates both YAML and JSON versions of spec fragments grouped by path. + * + * @module openapi-paths-to-hugo-data + */ + +import * as yaml from 'js-yaml'; +import * as fs from 'fs'; +import * as path from 'path'; + +/** + * OpenAPI path item object + */ +interface PathItem { + get?: Operation; + post?: Operation; + put?: Operation; + patch?: Operation; + delete?: Operation; + options?: Operation; + head?: Operation; + trace?: Operation; + parameters?: Parameter[]; + [key: string]: unknown; +} + +/** + * OpenAPI operation object + */ +interface Operation { + operationId?: string; + summary?: string; + description?: string; + tags?: string[]; + parameters?: Parameter[]; + requestBody?: RequestBody; + responses?: Record; + [key: string]: unknown; +} + +/** + * OpenAPI parameter object + */ +interface Parameter { + name: string; + in: 'query' | 'header' | 'path' | 'cookie'; + description?: string; + required?: boolean; + schema?: Schema; + [key: string]: unknown; +} + +/** + * OpenAPI request body object + */ +interface RequestBody { + description?: string; + content?: Record; + required?: boolean; + [key: string]: unknown; +} + +/** + * OpenAPI response object + */ +interface Response { + description: string; + content?: Record; + headers?: Record; + [key: string]: unknown; +} + +/** + * OpenAPI media type object + */ +interface MediaType { + schema?: Schema; + example?: unknown; + examples?: Record; + [key: string]: unknown; +} + +/** + * OpenAPI schema object + */ +interface Schema { + type?: string; + format?: string; + description?: string; + 
properties?: Record; + items?: Schema; + required?: string[]; + [key: string]: unknown; +} + +/** + * OpenAPI header object + */ +interface Header { + description?: string; + schema?: Schema; + [key: string]: unknown; +} + +/** + * OpenAPI example object + */ +interface Example { + summary?: string; + description?: string; + value?: unknown; + [key: string]: unknown; +} + +/** + * OpenAPI document structure + */ +interface OpenAPIDocument { + openapi: string; + info: Info; + paths: Record; + components?: Components; + servers?: Server[]; + tags?: Tag[]; + description?: string; + 'x-pathGroup'?: string; + [key: string]: unknown; +} + +/** + * OpenAPI info object + */ +interface Info { + title: string; + version: string; + description?: string; + termsOfService?: string; + contact?: Contact; + license?: License; + [key: string]: unknown; +} + +/** + * OpenAPI contact object + */ +interface Contact { + name?: string; + url?: string; + email?: string; + [key: string]: unknown; +} + +/** + * OpenAPI license object + */ +interface License { + name: string; + url?: string; + [key: string]: unknown; +} + +/** + * OpenAPI components object + */ +interface Components { + schemas?: Record; + responses?: Record; + parameters?: Record; + requestBodies?: Record; + headers?: Record; + securitySchemes?: Record; + [key: string]: unknown; +} + +/** + * OpenAPI security scheme object + */ +interface SecurityScheme { + type: string; + description?: string; + [key: string]: unknown; +} + +/** + * OpenAPI server object + */ +interface Server { + url: string; + description?: string; + variables?: Record; + [key: string]: unknown; +} + +/** + * OpenAPI server variable object + */ +interface ServerVariable { + default: string; + enum?: string[]; + description?: string; + [key: string]: unknown; +} + +/** + * OpenAPI tag object + */ +interface Tag { + name: string; + description?: string; + externalDocs?: ExternalDocs; + [key: string]: unknown; +} + +/** + * OpenAPI external docs object + 
*/ +interface ExternalDocs { + url: string; + description?: string; + [key: string]: unknown; +} + +/** + * Article metadata for Hugo + */ +interface Article { + path: string; + fields: { + name: string; + describes: string[]; + title?: string; + description?: string; + tags?: string[]; + source?: string; + staticFilePath?: string; + }; +} + +/** + * Article collection for Hugo data files + */ +interface ArticleCollection { + articles: Article[]; +} + +/** + * Options for generating Hugo data + */ +export interface GenerateHugoDataOptions { + /** Path to the OpenAPI spec file */ + specFile: string; + /** Output path for generated OpenAPI path fragments */ + dataOutPath: string; + /** Output path for article metadata */ + articleOutPath: string; +} + +/** + * Options for writing OpenAPI article data + */ +interface WriteOpenapiArticleDataOptions { + /** File pattern to match when filtering files */ + filePattern?: string; +} + +/** + * Read a YAML file and parse it + * + * @param filepath - Path to the YAML file + * @param encoding - File encoding (default: 'utf8') + * @returns Parsed YAML content + */ +function readFile( + filepath: string, + encoding: BufferEncoding = 'utf8' +): OpenAPIDocument { + const content = fs.readFileSync(filepath, encoding); + return yaml.load(content) as OpenAPIDocument; +} + +/** + * Write data to a YAML file + * + * @param data - Data to write + * @param outputTo - Output file path + */ +function writeDataFile(data: unknown, outputTo: string): void { + fs.writeFileSync(outputTo, yaml.dump(data)); +} + +/** + * Write data to a JSON file + * + * @param data - Data to write + * @param outputTo - Output file path + */ +function writeJsonFile(data: unknown, outputTo: string): void { + fs.writeFileSync(outputTo, JSON.stringify(data, null, 2)); +} + +/** + * OpenAPI utility functions + */ +const openapiUtils = { + /** + * Check if a path fragment is a placeholder (e.g., {id}) + * + * @param str - Path fragment to check + * @returns True if 
the fragment is a placeholder + */ + isPlaceholderFragment(str: string): boolean { + const placeholderRegex = /^\{.*\}$/; + return placeholderRegex.test(str); + }, +}; + +/** + * Write OpenAPI specs grouped by path to separate files + * Generates both YAML and JSON versions + * + * @param openapi - OpenAPI document + * @param prefix - Filename prefix for output files + * @param outPath - Output directory path + */ +function writePathOpenapis( + openapi: OpenAPIDocument, + prefix: string, + outPath: string +): void { + const pathGroups: Record> = {}; + + // Group paths by their base path (first 3-4 segments, excluding placeholders) + Object.keys(openapi.paths) + .sort() + .forEach((p) => { + const delimiter = '/'; + let key = p.split(delimiter); + + // Check if this is an item path (ends with a placeholder) + let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + + // Take first 4 segments + key = key.slice(0, 4); + + // Check if the last segment is still a placeholder + isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + + const groupKey = key.join('/'); + pathGroups[groupKey] = pathGroups[groupKey] || {}; + pathGroups[groupKey][p] = openapi.paths[p]; + }); + + // Write each path group to separate YAML and JSON files + Object.keys(pathGroups).forEach((pg) => { + // Deep copy openapi + const doc: OpenAPIDocument = JSON.parse(JSON.stringify(openapi)); + doc.paths = pathGroups[pg]; + doc.info.title = `${pg}\n${doc.info.title}`; + doc['x-pathGroup'] = pg; + + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + + const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + + // Write both YAML and JSON versions + writeDataFile(doc, yamlPath); + 
writeJsonFile(doc, jsonPath); + + console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); + } catch (err) { + console.error(`Error writing path group ${pg}:`, err); + } + }); +} + +/** + * Create article metadata for a path group + * + * @param openapi - OpenAPI document with x-pathGroup + * @returns Article metadata object + */ +function createArticleDataForPathGroup(openapi: OpenAPIDocument): Article { + const article: Article = { + path: '', + fields: { + name: openapi['x-pathGroup'] || '', + describes: Object.keys(openapi.paths), + }, + }; + + /** + * Convert path to snake case for article path + * + * @param p - Path to convert + * @returns Snake-cased path + */ + const snakifyPath = (p: string): string => { + if (!p) { + return ''; + } + return p.replace(/^\//, '').replaceAll('/', '-'); + }; + + article.path = snakifyPath(openapi['x-pathGroup'] || ''); + article.fields.title = openapi.info?.title; + article.fields.description = openapi.description; + + const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); + article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] + .filter(Boolean) + .map((t) => snakifyPath(t)); + + return article; +} + +/** + * Write OpenAPI article metadata to Hugo data files + * Generates articles.yml and articles.json + * + * @param sourcePath - Path to directory containing OpenAPI fragment files + * @param targetPath - Output path for article data + * @param opts - Options including file pattern filter + */ +function writeOpenapiArticleData( + sourcePath: string, + targetPath: string, + opts: WriteOpenapiArticleDataOptions +): void { + /** + * Check if path is a file + */ + const isFile = (filePath: string): boolean => { + return fs.lstatSync(filePath).isFile(); + }; + + /** + * Check if filename matches pattern + */ + const matchesPattern = (filePath: string): boolean => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter( + (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') + ) // Only process YAML files + .map((filePath) => { + const openapi = readFile(filePath); + const article = createArticleDataForPathGroup(openapi); + article.fields.source = filePath; + // Hugo omits "/static" from the URI when serving files stored in "./static" + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); + } + + const articleCollection: ArticleCollection = { articles }; + + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + + console.log(`Generated ${articles.length} articles in ${targetPath}`); + } catch (e) { + console.error('Error writing article data:', e); + } +} + +/** + * Generate Hugo data files from an OpenAPI specification + * + * This function: + * 1. Reads the OpenAPI spec file + * 2. Groups paths by their base path + * 3. Writes each group to separate YAML and JSON files + * 4. 
Generates article metadata for Hugo + * + * @param options - Generation options + */ +export function generateHugoData(options: GenerateHugoDataOptions): void { + const filenamePrefix = `${path.parse(options.specFile).name}-`; + + const sourceFile = readFile(options.specFile, 'utf8'); + + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + + console.log( + `\nGenerating OpenAPI article data in ${options.articleOutPath}...` + ); + writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, { + filePattern: filenamePrefix, + }); + + console.log('\nGeneration complete!\n'); +} + +// CommonJS export for backward compatibility +module.exports = { + generateHugoData, +}; diff --git a/api-docs/scripts/openapi-paths-to-hugo-data/package.json b/api-docs/scripts/openapi-paths-to-hugo-data/package.json new file mode 100644 index 0000000000..78bd5bc114 --- /dev/null +++ b/api-docs/scripts/openapi-paths-to-hugo-data/package.json @@ -0,0 +1,14 @@ +{ + "name": "openapi-paths-to-hugo-data", + "version": "1.0.0", + "description": "Convert OpenAPI specifications to Hugo data files for API documentation", + "main": "index.js", + "type": "commonjs", + "dependencies": { + "js-yaml": "^4.1.1" + }, + "devDependencies": {}, + "scripts": {}, + "author": "InfluxData", + "license": "MIT" +} diff --git a/api-docs/scripts/openapi-paths-to-hugo-data/yarn.lock b/api-docs/scripts/openapi-paths-to-hugo-data/yarn.lock new file mode 100644 index 0000000000..96bb86828b --- /dev/null +++ b/api-docs/scripts/openapi-paths-to-hugo-data/yarn.lock @@ -0,0 +1,32 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@types/js-yaml@^4.0.9": + version "4.0.9" + resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.9.tgz#cd82382c4f902fed9691a2ed79ec68c5898af4c2" + integrity sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg== + +"@types/node@^24.10.1": + version "24.10.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-24.10.1.tgz#91e92182c93db8bd6224fca031e2370cef9a8f01" + integrity sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ== + dependencies: + undici-types "~7.16.0" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +js-yaml@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.1.tgz#854c292467705b699476e1a2decc0c8a3458806b" + integrity sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA== + dependencies: + argparse "^2.0.1" + +undici-types@~7.16.0: + version "7.16.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-7.16.0.tgz#ffccdff36aea4884cbfce9a750a0580224f58a46" + integrity sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw== diff --git a/api-docs/scripts/tsconfig.json b/api-docs/scripts/tsconfig.json new file mode 100644 index 0000000000..e36776534b --- /dev/null +++ b/api-docs/scripts/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "ES2021", + "lib": ["ES2021"], + "module": "CommonJS", + "moduleResolution": "node", + "outDir": "./dist", + "rootDir": ".", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "declaration": true, + "sourceMap": true, + "types": ["node"] + }, + 
"include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} diff --git a/config/_default/hugo.yml b/config/_default/hugo.yml index c576413cd4..08859dea60 100644 --- a/config/_default/hugo.yml +++ b/config/_default/hugo.yml @@ -98,6 +98,8 @@ module: params: env: development environment: development + # API documentation renderer: "scalar" (default) or "rapidoc" + apiRenderer: scalar # Configure the server for development server: diff --git a/data/article-data/influxdb/cloud-v2/articles.json b/data/article-data/influxdb/cloud-v2/articles.json new file mode 100644 index 0000000000..2267c4303d --- /dev/null +++ b/data/article-data/influxdb/cloud-v2/articles.json @@ -0,0 +1,597 @@ +{ + "articles": [ + { + "path": "api-v2-authorizations", + "fields": { + "name": "/api/v2/authorizations", + "describes": [ + "/api/v2/authorizations", + "/api/v2/authorizations/{authID}" + ], + "title": "/api/v2/authorizations\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "authorizations" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml" + } + }, + { + "path": "api-v2-buckets", + "fields": { + "name": "/api/v2/buckets", + "describes": [ + "/api/v2/buckets", + "/api/v2/buckets/{bucketID}", + "/api/v2/buckets/{bucketID}/labels", + "/api/v2/buckets/{bucketID}/labels/{labelID}", + "/api/v2/buckets/{bucketID}/members", + "/api/v2/buckets/{bucketID}/members/{userID}", + "/api/v2/buckets/{bucketID}/owners", + "/api/v2/buckets/{bucketID}/owners/{userID}", + "/api/v2/buckets/{bucketID}/schema/measurements", + "/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}" + ], + "title": "/api/v2/buckets\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "buckets" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml" + } + }, + { + 
"path": "api-v2-checks", + "fields": { + "name": "/api/v2/checks", + "describes": [ + "/api/v2/checks", + "/api/v2/checks/{checkID}", + "/api/v2/checks/{checkID}/labels", + "/api/v2/checks/{checkID}/labels/{labelID}", + "/api/v2/checks/{checkID}/query" + ], + "title": "/api/v2/checks\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "checks" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml" + } + }, + { + "path": "api-v2-dashboards", + "fields": { + "name": "/api/v2/dashboards", + "describes": [ + "/api/v2/dashboards", + "/api/v2/dashboards/{dashboardID}", + "/api/v2/dashboards/{dashboardID}/cells", + "/api/v2/dashboards/{dashboardID}/cells/{cellID}", + "/api/v2/dashboards/{dashboardID}/cells/{cellID}/view", + "/api/v2/dashboards/{dashboardID}/labels", + "/api/v2/dashboards/{dashboardID}/labels/{labelID}", + "/api/v2/dashboards/{dashboardID}/members", + "/api/v2/dashboards/{dashboardID}/members/{userID}", + "/api/v2/dashboards/{dashboardID}/owners", + "/api/v2/dashboards/{dashboardID}/owners/{userID}" + ], + "title": "/api/v2/dashboards\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "dashboards" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml" + } + }, + { + "path": "api-v2-dbrps", + "fields": { + "name": "/api/v2/dbrps", + "describes": [ + "/api/v2/dbrps", + "/api/v2/dbrps/{dbrpID}" + ], + "title": "/api/v2/dbrps\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "dbrps" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml" + } + }, + { + "path": "api-v2-delete", + "fields": { + "name": "/api/v2/delete", + "describes": [ + "/api/v2/delete" + ], + "title": "/api/v2/delete\nInfluxDB Cloud API Service", + "tags": [ + 
"api-v2", + "delete" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml" + } + }, + { + "path": "api-v2-flags", + "fields": { + "name": "/api/v2/flags", + "describes": [ + "/api/v2/flags" + ], + "title": "/api/v2/flags\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "flags" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml" + } + }, + { + "path": "api-v2-labels", + "fields": { + "name": "/api/v2/labels", + "describes": [ + "/api/v2/labels", + "/api/v2/labels/{labelID}" + ], + "title": "/api/v2/labels\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "labels" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml" + } + }, + { + "path": "api-v2-maps", + "fields": { + "name": "/api/v2/maps", + "describes": [ + "/api/v2/maps/mapToken" + ], + "title": "/api/v2/maps\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "maps" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml" + } + }, + { + "path": "api-v2-me", + "fields": { + "name": "/api/v2/me", + "describes": [ + "/api/v2/me", + "/api/v2/me/password" + ], + "title": "/api/v2/me\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "me" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml" + } + }, + { + "path": "api-v2-notificationEndpoints", + "fields": { + "name": "/api/v2/notificationEndpoints", + "describes": [ + "/api/v2/notificationEndpoints", + "/api/v2/notificationEndpoints/{endpointID}", + "/api/v2/notificationEndpoints/{endpointID}/labels", + 
"/api/v2/notificationEndpoints/{endpointID}/labels/{labelID}" + ], + "title": "/api/v2/notificationEndpoints\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "notificationEndpoints" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml" + } + }, + { + "path": "api-v2-notificationRules", + "fields": { + "name": "/api/v2/notificationRules", + "describes": [ + "/api/v2/notificationRules", + "/api/v2/notificationRules/{ruleID}", + "/api/v2/notificationRules/{ruleID}/labels", + "/api/v2/notificationRules/{ruleID}/labels/{labelID}", + "/api/v2/notificationRules/{ruleID}/query" + ], + "title": "/api/v2/notificationRules\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "notificationRules" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml" + } + }, + { + "path": "api-v2-orgs", + "fields": { + "name": "/api/v2/orgs", + "describes": [ + "/api/v2/orgs", + "/api/v2/orgs/{orgID}", + "/api/v2/orgs/{orgID}/limits", + "/api/v2/orgs/{orgID}/members", + "/api/v2/orgs/{orgID}/members/{userID}", + "/api/v2/orgs/{orgID}/owners", + "/api/v2/orgs/{orgID}/owners/{userID}", + "/api/v2/orgs/{orgID}/secrets", + "/api/v2/orgs/{orgID}/secrets/delete", + "/api/v2/orgs/{orgID}/secrets/{secretID}", + "/api/v2/orgs/{orgID}/usage" + ], + "title": "/api/v2/orgs\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "orgs" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml" + } + }, + { + "path": "api-v2-query", + "fields": { + "name": "/api/v2/query", + "describes": [ + "/api/v2/query", + "/api/v2/query/analyze", + "/api/v2/query/ast", + "/api/v2/query/suggestions", + "/api/v2/query/suggestions/{name}" + ], + "title": 
"/api/v2/query\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "query" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml" + } + }, + { + "path": "api-v2-resources", + "fields": { + "name": "/api/v2/resources", + "describes": [ + "/api/v2/resources" + ], + "title": "/api/v2/resources\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "resources" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml" + } + }, + { + "path": "api-v2-scripts", + "fields": { + "name": "/api/v2/scripts", + "describes": [ + "/api/v2/scripts", + "/api/v2/scripts/{scriptID}", + "/api/v2/scripts/{scriptID}/invoke", + "/api/v2/scripts/{scriptID}/params" + ], + "title": "/api/v2/scripts\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "scripts" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml" + } + }, + { + "path": "api-v2-setup", + "fields": { + "name": "/api/v2/setup", + "describes": [ + "/api/v2/setup", + "/api/v2/setup/user" + ], + "title": "/api/v2/setup\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "setup" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml" + } + }, + { + "path": "api-v2-signin", + "fields": { + "name": "/api/v2/signin", + "describes": [ + "/api/v2/signin" + ], + "title": "/api/v2/signin\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "signin" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml" + } + }, + { + "path": "api-v2-signout", + "fields": { + "name": "/api/v2/signout", + "describes": [ + 
"/api/v2/signout" + ], + "title": "/api/v2/signout\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "signout" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml" + } + }, + { + "path": "api-v2-stacks", + "fields": { + "name": "/api/v2/stacks", + "describes": [ + "/api/v2/stacks", + "/api/v2/stacks/{stack_id}", + "/api/v2/stacks/{stack_id}/uninstall" + ], + "title": "/api/v2/stacks\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "stacks" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml" + } + }, + { + "path": "api-v2-tasks", + "fields": { + "name": "/api/v2/tasks", + "describes": [ + "/api/v2/tasks", + "/api/v2/tasks/{taskID}", + "/api/v2/tasks/{taskID}/labels", + "/api/v2/tasks/{taskID}/labels/{labelID}", + "/api/v2/tasks/{taskID}/logs", + "/api/v2/tasks/{taskID}/members", + "/api/v2/tasks/{taskID}/members/{userID}", + "/api/v2/tasks/{taskID}/owners", + "/api/v2/tasks/{taskID}/owners/{userID}", + "/api/v2/tasks/{taskID}/runs", + "/api/v2/tasks/{taskID}/runs/{runID}", + "/api/v2/tasks/{taskID}/runs/{runID}/logs", + "/api/v2/tasks/{taskID}/runs/{runID}/retry" + ], + "title": "/api/v2/tasks\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "tasks" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml" + } + }, + { + "path": "api-v2-telegraf", + "fields": { + "name": "/api/v2/telegraf", + "describes": [ + "/api/v2/telegraf/plugins" + ], + "title": "/api/v2/telegraf\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "telegraf" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml" + } + }, + { + "path": 
"api-v2-telegrafs", + "fields": { + "name": "/api/v2/telegrafs", + "describes": [ + "/api/v2/telegrafs", + "/api/v2/telegrafs/{telegrafID}", + "/api/v2/telegrafs/{telegrafID}/labels", + "/api/v2/telegrafs/{telegrafID}/labels/{labelID}", + "/api/v2/telegrafs/{telegrafID}/members", + "/api/v2/telegrafs/{telegrafID}/members/{userID}", + "/api/v2/telegrafs/{telegrafID}/owners", + "/api/v2/telegrafs/{telegrafID}/owners/{userID}" + ], + "title": "/api/v2/telegrafs\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "telegrafs" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml" + } + }, + { + "path": "api-v2-templates", + "fields": { + "name": "/api/v2/templates", + "describes": [ + "/api/v2/templates/apply", + "/api/v2/templates/export" + ], + "title": "/api/v2/templates\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "templates" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml" + } + }, + { + "path": "api-v2-users", + "fields": { + "name": "/api/v2/users", + "describes": [ + "/api/v2/users", + "/api/v2/users/{userID}", + "/api/v2/users/{userID}/password" + ], + "title": "/api/v2/users\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "users" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml" + } + }, + { + "path": "api-v2-variables", + "fields": { + "name": "/api/v2/variables", + "describes": [ + "/api/v2/variables", + "/api/v2/variables/{variableID}", + "/api/v2/variables/{variableID}/labels", + "/api/v2/variables/{variableID}/labels/{labelID}" + ], + "title": "/api/v2/variables\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "variables" + ], + "source": 
"static/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml" + } + }, + { + "path": "api-v2-write", + "fields": { + "name": "/api/v2/write", + "describes": [ + "/api/v2/write" + ], + "title": "/api/v2/write\nInfluxDB Cloud API Service", + "tags": [ + "api-v2", + "write" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml" + } + }, + { + "path": "api-v2", + "fields": { + "name": "/api/v2", + "describes": [ + "/api/v2" + ], + "title": "/api/v2\nInfluxDB Cloud API Service", + "tags": [ + "api", + "v2" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml" + } + }, + { + "path": "legacy-authorizations", + "fields": { + "name": "/legacy/authorizations", + "describes": [ + "/legacy/authorizations", + "/legacy/authorizations/{authID}", + "/legacy/authorizations/{authID}/password" + ], + "title": "/legacy/authorizations\nInfluxDB Cloud API Service", + "tags": [ + "legacy", + "authorizations" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml" + } + }, + { + "path": "ping", + "fields": { + "name": "/ping", + "describes": [ + "/ping" + ], + "title": "/ping\nInfluxDB Cloud API Service", + "tags": [ + "", + "ping" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-ping.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-ping.yaml" + } + }, + { + "path": "query", + "fields": { + "name": "/query", + "describes": [ + "/query" + ], + "title": "/query\nInfluxDB Cloud API Service", + "tags": [ + "", + "query" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-query.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-query.yaml" + } + }, 
+ { + "path": "write", + "fields": { + "name": "/write", + "describes": [ + "/write" + ], + "title": "/write\nInfluxDB Cloud API Service", + "tags": [ + "", + "write" + ], + "source": "static/openapi/influxdb-cloud-v2/paths/ref-write.yaml", + "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-write.yaml" + } + } + ] +} \ No newline at end of file diff --git a/layouts/_default/api.html b/layouts/_default/api.html index 68a09ae661..bee90952e4 100644 --- a/layouts/_default/api.html +++ b/layouts/_default/api.html @@ -1 +1,33 @@ -{{ .Content }} +{{/* + API Documentation Default Layout + + Full page layout for API documentation using the renderer abstraction. + The renderer (Scalar or RapiDoc) is selected via site.Params.apiRenderer. + + Required frontmatter: + - staticFilePath: Path to the OpenAPI specification file +*/}} + +{{ partial "header.html" . }} +{{ partial "topnav.html" . }} + +
+ {{ partial "sidebar.html" . }} +
+
+
+

{{ .Title }}

+ {{ with .Description }} +

{{ . }}

+ {{ end }} +
+ + {{/* Render API documentation using the configured renderer */}} + {{ partial "api/renderer.html" . }} + +
+ +
+
+ +{{ partial "footer.html" . }} diff --git a/layouts/api/list.html b/layouts/api/list.html new file mode 100644 index 0000000000..6c865e70ff --- /dev/null +++ b/layouts/api/list.html @@ -0,0 +1,33 @@ +{{/* + API Documentation List/Section Layout + + Uses the renderer abstraction to display API documentation. + The renderer (Scalar or RapiDoc) is selected via site.Params.apiRenderer. + + Required frontmatter: + - staticFilePath: Path to the OpenAPI specification file +*/}} + +{{ partial "header.html" . }} +{{ partial "topnav.html" . }} + +
+ {{ partial "sidebar.html" . }} +
+
+
+

{{ .Title }}

+ {{ with .Description }} +

{{ . }}

+ {{ end }} +
+ + {{/* Render API documentation using the configured renderer */}} + {{ partial "api/renderer.html" . }} + +
+ +
+
+ +{{ partial "footer.html" . }} diff --git a/layouts/api/single.html b/layouts/api/single.html new file mode 100644 index 0000000000..3bf97deb1a --- /dev/null +++ b/layouts/api/single.html @@ -0,0 +1,33 @@ +{{/* + API Documentation Single Page Layout + + Uses the renderer abstraction to display API documentation. + The renderer (Scalar or RapiDoc) is selected via site.Params.apiRenderer. + + Required frontmatter: + - staticFilePath: Path to the OpenAPI specification file +*/}} + +{{ partial "header.html" . }} +{{ partial "topnav.html" . }} + +
+ {{ partial "sidebar.html" . }} +
+
+
+

{{ .Title }}

+ {{ with .Description }} +

{{ . }}

+ {{ end }} +
+ + {{/* Render API documentation using the configured renderer */}} + {{ partial "api/renderer.html" . }} + +
+ +
+
+ +{{ partial "footer.html" . }} diff --git a/layouts/partials/api/rapidoc.html b/layouts/partials/api/rapidoc.html new file mode 100644 index 0000000000..01b491deb8 --- /dev/null +++ b/layouts/partials/api/rapidoc.html @@ -0,0 +1,179 @@ +{{/* + RapiDoc API Documentation Renderer (Legacy) + + Legacy API documentation renderer using RapiDoc. + Maintained for backward compatibility. + + Required page params: + - staticFilePath: Path to the OpenAPI specification file +*/}} + +{{ $specPath := .Params.staticFilePath }} +{{ $specPathJSON := replace $specPath ".yaml" ".json" | replace ".yml" ".json" }} + +{{/* Machine-readable links for AI agent discovery */}} +{{ if $specPath }} + + +{{ end }} + +
+ {{/* Download link for the spec */}} + {{ if $specPath }} + + {{ end }} + + {{/* RapiDoc component */}} + +
+ +{{/* Load RapiDoc from CDN */}} + + + + + diff --git a/layouts/partials/api/renderer.html b/layouts/partials/api/renderer.html new file mode 100644 index 0000000000..c6b795f37f --- /dev/null +++ b/layouts/partials/api/renderer.html @@ -0,0 +1,25 @@ +{{/* + API Renderer Abstraction + + Selects and loads the appropriate API documentation renderer based on site + configuration. Supports: + - scalar (default): Modern, accessible API documentation + - rapidoc: Legacy renderer for backward compatibility + + Required page params: + - staticFilePath: Path to the OpenAPI specification file + + Site params: + - apiRenderer: "scalar" or "rapidoc" (defaults to "scalar") +*/}} + +{{ $renderer := site.Params.apiRenderer | default "scalar" }} + +{{ if eq $renderer "scalar" }} + {{ partial "api/scalar.html" . }} +{{ else if eq $renderer "rapidoc" }} + {{ partial "api/rapidoc.html" . }} +{{ else }} + {{/* Fallback to scalar if unknown renderer specified */}} + {{ partial "api/scalar.html" . }} +{{ end }} diff --git a/layouts/partials/api/scalar.html b/layouts/partials/api/scalar.html new file mode 100644 index 0000000000..885e286ed8 --- /dev/null +++ b/layouts/partials/api/scalar.html @@ -0,0 +1,186 @@ +{{/* + Scalar API Documentation Renderer + + Modern, accessible API documentation powered by Scalar. + Features: + - Dark/light theme support synchronized with site theme + - InfluxData brand colors + - Responsive layout + - AI agent spec discovery via link[rel=alternate] + - Download link for OpenAPI spec + + Required page params: + - staticFilePath: Path to the OpenAPI specification file +*/}} + +{{ $specPath := .Params.staticFilePath }} +{{ $specPathJSON := replace $specPath ".yaml" ".json" | replace ".yml" ".json" }} + +{{/* Machine-readable links for AI agent discovery */}} +{{ if $specPath }} + + +{{ end }} + +
+ {{/* Download link for the spec */}} + {{ if $specPath }} + + {{ end }} + + {{/* Scalar API Reference container */}} +
+
+ +{{/* Load Scalar from CDN */}} + + + + + From b22faf03742ba1b536abf05ef03ed06b0edee9e7 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Tue, 2 Dec 2025 15:53:58 -0600 Subject: [PATCH 02/51] fix(shortcodes): Fix Hugo 0.134 compatibility issues - latest-patch.html: Replace deprecated .Store with local variable assignment. The .Store method was removed from shortcode context in newer Hugo versions. - api-endpoint.html: Add nil check for productRef lookup to prevent index errors when productKey is not in productAliases dictionary. Falls back to "influxdb" as default product reference. --- layouts/shortcodes/api-endpoint.html | 10 ++++++---- layouts/shortcodes/latest-patch.html | 13 +++++++------ 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/layouts/shortcodes/api-endpoint.html b/layouts/shortcodes/api-endpoint.html index bb48c4f786..e3c57682bb 100644 --- a/layouts/shortcodes/api-endpoint.html +++ b/layouts/shortcodes/api-endpoint.html @@ -7,10 +7,12 @@ {{- $parsedProductKey := cond $isOSS "oss" $currentVersion -}} {{- $productKey := .Get "influxdb_host" | default $parsedProductKey -}} {{- $productAliases := dict "oss" "influxdb" "cloud" "influxdb_cloud" "cloud-tsm" "influxdb_cloud" "core" "influxdb3_core" "enterprise" "influxdb3_enterprise" "cloud-serverless" "influxdb3_cloud_serverless" "serverless" "influxdb3_cloud_serverless" "cloud-dedicated" "influxdb3_cloud_dedicated" "dedicated" "influxdb3_cloud_dedicated" "clustered" "influxdb3_clustered" -}} -{{- $productRef := index $productAliases $productKey -}} -{{- $productData := dict -}} -{{- with $productRef }}{{- $productData = index $.Site.Data.products . 
| default dict -}}{{- end -}} -{{- $placeholderHost := $productData.placeholder_host | default "localhost:8086" }} +{{- $productRef := index $productAliases $productKey | default "influxdb" -}} +{{- $productData := index .Site.Data.products $productRef -}} +{{- $placeholderHost := "" -}} +{{- with $productData -}} + {{- $placeholderHost = .placeholder_host -}} +{{- end -}} {{- $method := .Get "method" | upper -}} {{- $methodStyle := .Get "method" | lower -}} {{- $apiRef := .Get "api-ref" | default "" -}} diff --git a/layouts/shortcodes/latest-patch.html b/layouts/shortcodes/latest-patch.html index df03397a7b..484d606ca3 100644 --- a/layouts/shortcodes/latest-patch.html +++ b/layouts/shortcodes/latest-patch.html @@ -10,19 +10,20 @@ {{- $patchVersions := index (index .Site.Data.products $product) "latest_patches" -}} {{- $cliVersions := index .Site.Data.products.influxdb "latest_cli" -}} {{- $isInfluxDB3 := eq $product "influxdb3" -}} +{{- $patchVersion := "" -}} {{- if $cli }} {{- if eq $version "cloud" -}} - {{- .Store.Set "patchVersion" (index $cliVersions $latestVersion) -}} + {{- $patchVersion = index $cliVersions $latestVersion -}} {{- else -}} - {{- .Store.Set "patchVersion" (index $cliVersions $version) -}} + {{- $patchVersion = index $cliVersions $version -}} {{- end -}} {{- else -}} {{- if eq $version "cloud" -}} - {{- .Store.Set "patchVersion" (index $patchVersions $latestVersion) -}} + {{- $patchVersion = index $patchVersions $latestVersion -}} {{- else if $isInfluxDB3 -}} - {{- .Store.Set "patchVersion" (index .Site.Data.products (print $product "_" $version)).latest_patch -}} + {{- $patchVersion = (index .Site.Data.products (print $product "_" $version)).latest_patch -}} {{- else -}} - {{- .Store.Set "patchVersion" (index $patchVersions $version) -}} + {{- $patchVersion = index $patchVersions $version -}} {{- end -}} {{- end -}} -{{- .Store.Get "patchVersion" -}} \ No newline at end of file +{{- $patchVersion -}} \ No newline at end of file From 
c3281d87f2d83ab4a7a5842af80f820ba188db2c Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Tue, 2 Dec 2025 16:11:49 -0600 Subject: [PATCH 03/51] chore: Update build:api-docs script to use new generator - yarn build:api-docs: Generate API docs for all products - yarn build:api-docs cloud-v2: Generate for specific product - yarn build:api-docs:compile: Recompile TypeScript if modified --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 22dc9e6864..5ee36aa39f 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,8 @@ "docs:create": "node scripts/docs-create.js", "docs:edit": "node scripts/docs-edit.js", "docs:add-placeholders": "node scripts/add-placeholders.js", - "build:api-docs": "cd api-docs && sh generate-api-docs.sh", + "build:api-docs": "node api-docs/scripts/dist/generate-openapi-articles.js", + "build:api-docs:compile": "tsc --project api-docs/scripts/tsconfig.json", "build:pytest:image": "docker build -t influxdata/docs-pytest:latest -f Dockerfile.pytest .", "build:agent:instructions": "node ./helper-scripts/build-agent-instructions.js", "build:ts": "tsc --project tsconfig.json --outDir dist", From b1b7dc607e9ca3b94ecc072d6374e78e301e6d40 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Fri, 5 Dec 2025 10:27:02 -0600 Subject: [PATCH 04/51] chore(claude): Add claude skill for Hugo development/testing workflow. Split ui-dev subagent into hugo-, ts-, and testing-specific agents. 
--- .claude/agents/ui-testing.md | 2 +- .claude/skills/hugo-template-dev/SKILL.md | 49 +---------------------- 2 files changed, 2 insertions(+), 49 deletions(-) diff --git a/.claude/agents/ui-testing.md b/.claude/agents/ui-testing.md index eea687edb3..7bb01fcf31 100644 --- a/.claude/agents/ui-testing.md +++ b/.claude/agents/ui-testing.md @@ -50,7 +50,7 @@ node cypress/support/run-e2e-specs.js \ # Run against a URL (for running server) node cypress/support/run-e2e-specs.js \ --spec "cypress/e2e/content/my-test.cy.js" \ - http://localhost:/path/to/page/ + http://localhost:1313/path/to/page/ # Run all E2E tests yarn test:e2e diff --git a/.claude/skills/hugo-template-dev/SKILL.md b/.claude/skills/hugo-template-dev/SKILL.md index 7139074f09..83bb0ac0b3 100644 --- a/.claude/skills/hugo-template-dev/SKILL.md +++ b/.claude/skills/hugo-template-dev/SKILL.md @@ -432,7 +432,7 @@ node cypress/support/run-e2e-specs.js \ ```bash node cypress/support/run-e2e-specs.js \ --spec "cypress/e2e/content/api-reference.cy.js" \ - http://localhost:/influxdb3/core/reference/api/ + http://localhost:1313/influxdb3/core/reference/api/ ``` **Example Cypress test structure for API reference:** @@ -467,53 +467,6 @@ describe('API Reference Documentation', () => { }); ``` -**Check for JavaScript console errors (common pattern for feature development):** - -```javascript -// cypress/e2e/content/my-component.cy.js -describe('My Component', () => { - it('should not throw JavaScript console errors', () => { - cy.visit('/path/to/page/'); - - // Wait for component to initialize - cy.get('[data-component="my-component"]', { timeout: 5000 }) - .should('be.visible'); - - cy.window().then((win) => { - const logs = []; - const originalError = win.console.error; - - // Intercept console.error calls - win.console.error = (...args) => { - logs.push(args.join(' ')); - originalError.apply(win.console, args); - }; - - // Allow time for async operations - cy.wait(2000); - - cy.then(() => { - // Filter for relevant 
errors (customize for your component) - const relevantErrors = logs.filter( - (log) => - log.includes('my-component') || - log.includes('Failed to parse') || - log.includes('is not a function') - ); - expect(relevantErrors).to.have.length(0); - }); - }); - }); -}); -``` - -This pattern is especially useful for catching: - -- TypeScript/JavaScript runtime errors in components -- JSON parsing failures from `data-*` attributes -- Undefined function calls from missing imports -- Template data binding issues that only manifest at runtime - **Integrate Cypress into development workflow:** 1. Create test file in `cypress/e2e/content/` for your feature From 3c3a272980668376e2b283a52868a5fe75c95208 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:02:56 -0600 Subject: [PATCH 05/51] fix(api): Add operations list styling and regroup sidebar navigation - Add CSS for operations list cards with method badges, paths, and summaries - Remove duplicate Overview section from list.html (was duplicating summary) - Split "Data Operations" into separate nav groups: Write data, Query data, Cache data --- assets/styles/layouts/_api-layout.scss | 667 +++++++++++++++++++++++++ data/api_nav_groups.yml | 48 ++ layouts/api/list.html | 120 ++++- 3 files changed, 820 insertions(+), 15 deletions(-) create mode 100644 assets/styles/layouts/_api-layout.scss create mode 100644 data/api_nav_groups.yml diff --git a/assets/styles/layouts/_api-layout.scss b/assets/styles/layouts/_api-layout.scss new file mode 100644 index 0000000000..bd44ba1ca2 --- /dev/null +++ b/assets/styles/layouts/_api-layout.scss @@ -0,0 +1,667 @@ +/////////////////////////////// API Reference Layout /////////////////////////////// +// +// 3-column layout for API reference documentation: +// - Left: Existing Hugo sidebar + API navigation section +// - Center: Content with page-level tabs (Operations | Server | Auth | Compatibility) +// - Right: "ON THIS PAGE" table of contents +// 
+//////////////////////////////////////////////////////////////////////////////// + +// Content wrapper becomes flex container when used with API content +// Override overflow:hidden from _content-wrapper.scss to enable sticky positioning +.content-wrapper.api-content { + display: flex; + flex-direction: row; + align-items: flex-start; + overflow: visible; // Required for sticky TOC to work +} + +// Main API content area (center column) +.api-main { + flex: 1; + min-width: 0; // Prevent flex item from overflowing + padding-right: 1rem; +} + +// Right-side TOC (third column) +.api-toc { + width: 200px; + flex-shrink: 0; + position: sticky; + top: 80px; // Account for fixed header height + align-self: flex-start; // Critical for sticky to work in flexbox + max-height: calc(100vh - 100px); + overflow-y: auto; + padding: 1rem; + border-left: 1px solid $nav-border; + + // Hidden state (used when Operations/RapiDoc tab is active) + &.is-hidden { + display: none; + } + + &-header { + font-size: 0.75rem; + font-weight: $bold; + text-transform: uppercase; + letter-spacing: 0.08rem; + color: rgba($article-heading, 0.5); + margin: 0 0 1rem; + } + + &-nav { + // TOC list styles + .api-toc-list { + list-style: none; + margin: 0; + padding: 0; + } + + .api-toc-item { + margin: 0; + + &--nested { + padding-left: 0.75rem; + } + } + + .api-toc-link { + display: block; + padding: 0.35rem 0; + font-size: 0.85rem; + color: $nav-item; + text-decoration: none; + transition: color 0.2s; + line-height: 1.4; + + &:hover { + color: $nav-item-hover; + } + + &.is-active { + color: $nav-active; + font-weight: $medium; + } + } + } + + &-empty { + font-size: 0.85rem; + color: rgba($article-text, 0.5); + font-style: italic; + } + + // Operations-based TOC (for tag-based pages) + &-nav .api-toc-list--operations { + .api-toc-item--operation { + margin: 0.35rem 0; + } + + .api-toc-link--operation { + display: flex; + align-items: center; + gap: 0.5rem; + font-size: 0.8rem; + padding: 0.3rem 0; + } + 
+ // HTTP method badges in TOC + .api-method { + display: inline-block; + font-size: 0.6rem; + font-weight: $bold; + text-transform: uppercase; + padding: 0.15rem 0.3rem; + border-radius: 3px; + min-width: 2.2rem; + text-align: center; + flex-shrink: 0; + + &--get { background-color: $gr-rainforest; color: #fff; } + &--post { background-color: $b-ocean; color: #fff; } + &--put { background-color: $br-galaxy; color: #fff; } + &--patch { background-color: $y-thunder; color: rgba($g5-pepper, 0.75); } + &--delete { background-color: $r-curacao; color: #fff; } + } + + .api-path { + font-family: $code; + font-size: 0.75rem; + word-break: break-all; + color: inherit; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// +////////////////////////// Operations List (Main Content) ////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Operations list section +.api-operations-list { + margin: 2rem 0; + + h2 { + margin-bottom: 1rem; + } +} + +// Grid container for operation cards +.api-operations-grid { + display: flex; + flex-direction: column; + gap: 0.5rem; +} + +// Individual operation card (clickable link) +.api-operation-card { + display: flex; + align-items: flex-start; + gap: 0.75rem; + padding: 0.75rem 1rem; + background: rgba($article-bg, 0.5); + border: 1px solid $nav-border; + border-radius: $radius; + text-decoration: none; + color: $article-text; + transition: background-color 0.2s, border-color 0.2s; + + &:hover { + background: rgba($article-bg, 0.8); + border-color: $nav-item-hover; + } + + // HTTP method badge + .api-method { + display: inline-block; + font-size: 0.7rem; + font-weight: $bold; + text-transform: uppercase; + padding: 0.2rem 0.4rem; + border-radius: 3px; + min-width: 3.5rem; + text-align: center; + flex-shrink: 0; + margin-top: 0.15rem; + + &--get { background-color: $gr-rainforest; color: #fff; } + &--post { background-color: $b-ocean; color: #fff; } 
+ &--put { background-color: $br-galaxy; color: #fff; } + &--patch { background-color: $y-thunder; color: rgba($g5-pepper, 0.75); } + &--delete { background-color: $r-curacao; color: #fff; } + } + + // API path in monospace + .api-path { + font-family: $code; + font-size: 0.9rem; + color: $article-heading; + word-break: break-all; + flex-shrink: 0; + } + + // Operation summary text + .api-operation-summary { + font-size: 0.875rem; + color: rgba($article-text, 0.8); + flex: 1; + } +} + +//////////////////////////////////////////////////////////////////////////////// +////////////////////////// API Navigation in Sidebar /////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// API navigation section added to the existing Hugo sidebar +.api-nav { + margin-top: 2rem; + padding-top: 1rem; + border-top: 1px solid $nav-border; + + &-header { + font-size: 0.85rem; + font-weight: $bold; + text-transform: uppercase; + letter-spacing: 0.06rem; + color: rgba($article-heading, 0.6); + margin: 0 0 1rem; + padding-left: 1.5rem; + } + + // API nav groups (collapsible sections) + &-group { + margin-bottom: 0.5rem; + + &-header { + display: flex; + align-items: center; + padding: 0.5rem 0 0.5rem 1.5rem; + font-weight: $medium; + color: $nav-category; + cursor: pointer; + transition: color 0.2s; + + &:hover { + color: $nav-category-hover; + } + + // Collapse/expand indicator + &::before { + content: ""; + display: inline-block; + width: 0; + height: 0; + margin-right: 0.5rem; + border-left: 5px solid $nav-border; + border-top: 4px solid transparent; + border-bottom: 4px solid transparent; + transition: transform 0.2s; + } + + &.is-open::before { + transform: rotate(90deg); + } + } + + &-items { + list-style: none; + padding-left: 2.5rem; + margin: 0; + max-height: 0; + overflow: hidden; + transition: max-height 0.3s ease-out; + + &.is-open { + max-height: 1000px; // Large enough to show all items + } + } + } + + // Individual 
API nav items + &-item { + margin: 0.25rem 0; + position: relative; + + a { + display: flex; + align-items: center; + padding: 0.35rem 0; + color: $nav-item; + text-decoration: none; + font-size: 0.95rem; + transition: color 0.2s; + + &:hover { + color: $nav-item-hover; + } + } + + &.is-active a { + color: $nav-active; + font-weight: $medium; + } + + // HTTP method badge + .method-badge { + display: inline-block; + font-size: 0.65rem; + font-weight: $bold; + text-transform: uppercase; + padding: 0.15rem 0.35rem; + margin-right: 0.5rem; + border-radius: 3px; + min-width: 2.5rem; + text-align: center; + + &.get { background-color: $gr-rainforest; color: #fff; } + &.post { background-color: $b-ocean; color: #fff; } + &.put { background-color: $br-galaxy; color: #fff; } + &.patch { background-color: $y-thunder; color: rgba($g5-pepper, 0.75); } + &.delete { background-color: $r-curacao; color: #fff; } + } + } +} + +//////////////////////////////////////////////////////////////////////////////// +/////////////////////////// API Header with Actions //////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Header row with title and download button +.article--header-row { + display: flex; + justify-content: space-between; + align-items: flex-start; + gap: 1rem; + flex-wrap: wrap; +} + +.article--header-text { + flex: 1; + min-width: 200px; +} + +// Download OpenAPI spec button +.api-spec-actions { + flex-shrink: 0; +} + +.api-spec-download { + display: inline-flex; + align-items: center; + gap: 0.5rem; + padding: 0.5rem 1rem; + background-color: $g20-white; + color: $article-text; + text-decoration: none; + border-radius: $radius; + font-size: 0.875rem; + font-weight: $medium; + transition: background-color 0.2s, color 0.2s; + border: 1px solid $nav-border; + white-space: nowrap; + + &:hover { + background-color: $r-curacao; + color: $g20-white; + border-color: $r-curacao; + } + + svg { + flex-shrink: 0; + } +} + 
+//////////////////////////////////////////////////////////////////////////////// +////////////////////////////////// API Tabs //////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// API-specific tab wrapper (uses api-tabs-wrapper to avoid conflict with +// tabbed-content.js which handles .tabs-wrapper elements) +.api-tabs-wrapper { + margin: 1.5rem 0 1rem; +} + +// API tab navigation bar +.api-tabs-nav { + display: flex; + flex-wrap: wrap; + gap: 2px; + + a { + flex-grow: 1; + position: relative; + font-size: 1rem; + font-weight: $medium; + padding: 0.65rem 1.25rem; + display: inline-block; + white-space: nowrap; + text-align: center; + color: $article-tab-text !important; + border-radius: $radius; + background-color: $article-tab-bg; + text-decoration: none; + transition: background-color 0.2s, color 0.2s; + z-index: 1; + + &::after { + content: ''; + position: absolute; + display: block; + top: 0; + right: 0; + width: 100%; + height: 100%; + border-radius: $radius; + @include gradient($article-btn-gradient); + opacity: 0; + transition: opacity 0.2s; + z-index: -1; + } + + &:hover { + color: $article-tab-active-text !important; + &::after { + opacity: 1; + } + } + + &.is-active { + color: $article-tab-active-text !important; + &::after { + opacity: 1; + @include gradient($article-btn-gradient); + } + } + } +} + +// Tab panels container +.api-tab-panels { + // Tab content visibility (follows existing pattern) + .tab-content:not(:first-of-type) { + display: none; + } + + // RapiDoc container styling + rapi-doc { + display: block; + width: 100%; + min-height: 400px; + } +} + +//////////////////////////////////////////////////////////////////////////////// +////////////////////////////// RapiDoc Overrides /////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Hide RapiDoc's internal navigation (we provide our own) 
+rapi-doc::part(section-navbar) { + display: none !important; +} + +// Hide RapiDoc's internal tag headers/titles (we use custom tabs for navigation) +// label-tag-title is the "PROCESSING ENGINE" header with auth badges shown in tag groups +rapi-doc::part(label-tag-title) { + display: none !important; +} + +// Hide RapiDoc's authentication section (we have separate Auth tab) +rapi-doc::part(section-auth) { + display: none !important; +} + +// Ensure RapiDoc content fills available space +rapi-doc::part(section-main-content) { + padding: 0; +} + +// Match RapiDoc's operation section styling to our theme +rapi-doc::part(section-operations) { + padding: 0; +} + +//////////////////////////////////////////////////////////////////////////////// +////////////////////////// Authentication Tab Content ////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +.api-auth-content { + max-width: 800px; +} + +.api-auth-card { + background: $article-bg; + border: 1px solid $nav-border; + border-radius: $radius; + padding: 1.5rem; + margin-bottom: 1.5rem; + + h3 { + margin-top: 0; + margin-bottom: 0.5rem; + } + + h4 { + margin-top: 1rem; + margin-bottom: 0.5rem; + font-size: 0.9rem; + text-transform: uppercase; + letter-spacing: 0.05em; + color: rgba($article-text, 0.6); + } + + pre { + margin: 0.5rem 0; + padding: 1rem; + background: $article-code-bg; + border-radius: $radius; + overflow-x: auto; + } + + code { + font-family: $code; + font-size: 0.875rem; + } +} + +.api-auth-badge .badge { + display: inline-block; + padding: 0.25rem 0.5rem; + font-size: 0.75rem; + font-weight: $bold; + text-transform: uppercase; + border-radius: $radius; + + &.recommended { + background: $gr-rainforest; + color: $g20-white; + } +} + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////// Server Tab Content //////////////////////////////// 
+//////////////////////////////////////////////////////////////////////////////// + +.api-server-panel { + max-width: 600px; + + h2 { + margin-top: 0; + } +} + +.server-url-config { + display: flex; + gap: 0.5rem; + align-items: flex-end; + margin: 1rem 0; + flex-wrap: wrap; + + label { + width: 100%; + font-weight: $medium; + margin-bottom: 0.25rem; + } + + input { + flex: 1; + min-width: 200px; + padding: 0.5rem; + border: 1px solid $nav-border; + border-radius: $radius; + font-family: $code; + background: $article-bg; + color: $article-text; + } + + button { + padding: 0.5rem 1rem; + background: $r-curacao; + color: $g20-white; + border: none; + border-radius: $radius; + cursor: pointer; + font-weight: $medium; + + &:hover { + background: darken($r-curacao, 10%); + } + } +} + +.server-info { + margin-top: 1.5rem; + + ul { + list-style: disc; + padding-left: 1.5rem; + } + + li { + margin: 0.5rem 0; + } + + code { + background: $article-code-bg; + padding: 0.2rem 0.4rem; + border-radius: 3px; + font-family: $code; + } +} + +//////////////////////////////////////////////////////////////////////////////// +///////////////////////////////// MEDIA QUERIES //////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Tablet: Hide TOC, keep sidebar +@include media(large) { + .content-wrapper.api-content { + flex-direction: column; + } + + .api-toc { + display: none; + } + + .api-main { + padding-right: 0; + } +} + +// Mobile: Standard Hugo sidebar behavior +@include media(medium) { + .content-wrapper.api-content { + flex-direction: column; + } + + .api-toc { + display: none; + } + + .api-main { + padding-right: 0; + } + + // Collapse API nav in mobile view + .api-nav { + margin-top: 1rem; + padding-top: 0.5rem; + + &-group-items { + max-height: none; // Show all items by default in mobile + } + } +} + +// Large screens: Wider TOC +@include media(xlarge) { + .api-toc { + width: 240px; + } +} + +// Match sidebar 
responsive widths +@media (min-width: 801px) and (max-width: 1200px) { + .api-toc { + width: 180px; + } +} diff --git a/data/api_nav_groups.yml b/data/api_nav_groups.yml new file mode 100644 index 0000000000..9598e548cc --- /dev/null +++ b/data/api_nav_groups.yml @@ -0,0 +1,48 @@ +# API Navigation Groups +# Defines sidebar navigation structure for API reference documentation +# Tags are grouped by function/task for better UX + +groups: + - name: Concepts + weight: 1 + tags: + - Quick start + - Authentication + - Headers and parameters + + - name: Write data + weight: 2 + tags: + - Write data + + - name: Query data + weight: 3 + tags: + - Query data + + - name: Cache data + weight: 4 + tags: + - Cache data + + - name: Administration + weight: 5 + tags: + - Database + - Table + - Token + + - name: Processing Engine + weight: 6 + tags: + - Processing engine + + - name: Server + weight: 7 + tags: + - Server information + + - name: Compatibility + weight: 8 + tags: + - Compatibility endpoints diff --git a/layouts/api/list.html b/layouts/api/list.html index 6c865e70ff..9be024de96 100644 --- a/layouts/api/list.html +++ b/layouts/api/list.html @@ -1,32 +1,122 @@ {{/* - API Documentation List/Section Layout + API Documentation Tag/List Layout - Uses the renderer abstraction to display API documentation. - The renderer (Scalar or RapiDoc) is selected via site.Params.apiRenderer. + Displays tag-based API documentation pages with: + 1. Title + 2. Summary (brief description) + 3. Operations list (links to nested operation pages) + 4. Description (detailed content) + + For conceptual pages (isConceptual: true), shows content without operations list. Required frontmatter: - - staticFilePath: Path to the OpenAPI specification file + - title: Page title + - description or summary: Brief description + - operations: Array of operation objects (for non-conceptual pages) */}} {{ partial "header.html" . }} {{ partial "topnav.html" . }}
+ {{/* Left: Existing Hugo sidebar (includes API nav via sidebar.html) */}} {{ partial "sidebar.html" . }} -
-
-
-

{{ .Title }}

- {{ with .Description }} -

{{ . }}

+ + {{/* Center + Right: Content and TOC */}} +
+
+
+
+
+
+

{{ .Title }}

+ + {{/* Summary - brief description at top */}} + {{ with .Params.summary }} +

{{ . | markdownify }}

+ {{ else }} + {{/* Fallback to first line of description if no summary */}} + {{ with .Description }} +

{{ . | truncate 200 | markdownify }}

+ {{ end }} + {{ end }} +
+ + {{/* Download OpenAPI spec button */}} + {{ with .Params.staticFilePath }} + + {{ end }} +
+
+ + {{ $isConceptual := .Params.isConceptual | default false }} + + {{ if $isConceptual }} + {{/* Conceptual Page - Show content directly */}} +
+ {{ with .Content }} + {{ . }} + {{ else }} + {{ with .Params.tagDescription }} + {{ . | markdownify }} + {{ end }} + {{ end }} +
+ {{ else }} + {{/* Operational Page - Show operations list then description */}} + + {{/* Operations List */}} + {{ $operations := .Params.operations }} + {{ if $operations }} +
+

Endpoints

+
+ {{ range $operations }} + + {{ upper .method }} + {{ .path }} + {{ .summary }} + + {{ end }} +
+
{{ end }} -
- {{/* Render API documentation using the configured renderer */}} - {{ partial "api/renderer.html" . }} + {{/* Hugo page content if any */}} + {{ with .Content }} +
+ {{ . }} +
+ {{ end }} + + {{/* RapiDoc renderer for API operations */}} + {{ with .Params.staticFilePath }} +
+ {{ partial "api/rapidoc.html" $ }} +
+ {{ end }} + + {{ end }} + + {{/* Related documentation links */}} + {{ partial "article/related.html" . }} + +
+
- - + {{/* Right: Page TOC - "ON THIS PAGE" */}} +
From de408322184f300c60fbfc3c5abeacdb8983e045 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:04:29 -0600 Subject: [PATCH 06/51] chore(claude): Update ui-testing agent and hugo-template-dev skill --- .claude/agents/ui-testing.md | 2 +- .claude/skills/hugo-template-dev/SKILL.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.claude/agents/ui-testing.md b/.claude/agents/ui-testing.md index 7bb01fcf31..eea687edb3 100644 --- a/.claude/agents/ui-testing.md +++ b/.claude/agents/ui-testing.md @@ -50,7 +50,7 @@ node cypress/support/run-e2e-specs.js \ # Run against a URL (for running server) node cypress/support/run-e2e-specs.js \ --spec "cypress/e2e/content/my-test.cy.js" \ - http://localhost:1313/path/to/page/ + http://localhost:/path/to/page/ # Run all E2E tests yarn test:e2e diff --git a/.claude/skills/hugo-template-dev/SKILL.md b/.claude/skills/hugo-template-dev/SKILL.md index 83bb0ac0b3..db71e1a99c 100644 --- a/.claude/skills/hugo-template-dev/SKILL.md +++ b/.claude/skills/hugo-template-dev/SKILL.md @@ -432,7 +432,7 @@ node cypress/support/run-e2e-specs.js \ ```bash node cypress/support/run-e2e-specs.js \ --spec "cypress/e2e/content/api-reference.cy.js" \ - http://localhost:1313/influxdb3/core/reference/api/ + http://localhost:/influxdb3/core/reference/api/ ``` **Example Cypress test structure for API reference:** From 7c04b8ad579814ea17c932d7dce7d5a82e327084 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:04:45 -0600 Subject: [PATCH 07/51] feat(api): Update OpenAPI article generators for tag-based structure - Add support for tag-based article generation with operations metadata - Generate articles.yml data files with tag, menuName, and isConceptual fields - Include operations array in frontmatter for tag pages --- .../scripts/dist/generate-openapi-articles.js | 681 +++++++++------ .../dist/openapi-paths-to-hugo-data/index.js | 779 +++++++++++++----- api-docs/scripts/generate-openapi-articles.ts | 
400 ++++++++- .../openapi-paths-to-hugo-data/index.ts | 551 ++++++++++++- 4 files changed, 1930 insertions(+), 481 deletions(-) diff --git a/api-docs/scripts/dist/generate-openapi-articles.js b/api-docs/scripts/dist/generate-openapi-articles.js index 92dcc39115..8fc31252b3 100644 --- a/api-docs/scripts/dist/generate-openapi-articles.js +++ b/api-docs/scripts/dist/generate-openapi-articles.js @@ -1,5 +1,5 @@ #!/usr/bin/env node -'use strict'; +"use strict"; /** * Generate OpenAPI Articles Script * @@ -20,58 +20,47 @@ * * @module generate-openapi-articles */ -var __createBinding = - (this && this.__createBinding) || - (Object.create - ? function (o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if ( - !desc || - ('get' in desc ? !m.__esModule : desc.writable || desc.configurable) - ) { - desc = { - enumerable: true, - get: function () { - return m[k]; - }, - }; - } - Object.defineProperty(o, k2, desc); - } - : function (o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); -var __setModuleDefault = - (this && this.__setModuleDefault) || - (Object.create - ? function (o, v) { - Object.defineProperty(o, 'default', { enumerable: true, value: v }); - } - : function (o, v) { - o['default'] = v; - }); -var __importStar = - (this && this.__importStar) || - function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) - for (var k in mod) - if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; -Object.defineProperty(exports, '__esModule', { value: true }); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); exports.productConfigs = void 0; exports.processProduct = processProduct; exports.generateDataFromOpenAPI = generateDataFromOpenAPI; exports.generatePagesFromArticleData = generatePagesFromArticleData; -const child_process_1 = require('child_process'); -const path = __importStar(require('path')); -const fs = __importStar(require('fs')); +const child_process_1 = require("child_process"); +const path = __importStar(require("path")); +const fs = __importStar(require("fs")); // Import the OpenAPI to Hugo converter const openapiPathsToHugo = require('./openapi-paths-to-hugo-data/index.js'); // Calculate the relative paths @@ -85,19 +74,20 @@ const API_DOCS_ROOT = 'api-docs'; * @throws Exits process with code 1 on error */ function execCommand(command, description) { - try { - if (description) { - console.log(`\n${description}...`); + try { + if (description) { + 
console.log(`\n${description}...`); + } + console.log(`Executing: ${command}\n`); + (0, child_process_1.execSync)(command, { stdio: 'inherit' }); } - console.log(`Executing: ${command}\n`); - (0, child_process_1.execSync)(command, { stdio: 'inherit' }); - } catch (error) { - console.error(`\n❌ Error executing command: ${command}`); - if (error instanceof Error) { - console.error(error.message); + catch (error) { + console.error(`\n❌ Error executing command: ${command}`); + if (error instanceof Error) { + console.error(error.message); + } + process.exit(1); } - process.exit(1); - } } /** * Generate Hugo data files from OpenAPI specification @@ -107,14 +97,14 @@ function execCommand(command, description) { * @param articleOutPath - Output path for article metadata */ function generateDataFromOpenAPI(specFile, dataOutPath, articleOutPath) { - if (!fs.existsSync(dataOutPath)) { - fs.mkdirSync(dataOutPath, { recursive: true }); - } - openapiPathsToHugo.generateHugoData({ - dataOutPath, - articleOutPath, - specFile, - }); + if (!fs.existsSync(dataOutPath)) { + fs.mkdirSync(dataOutPath, { recursive: true }); + } + openapiPathsToHugo.generateHugoData({ + dataOutPath, + articleOutPath, + specFile, + }); } /** * Generate Hugo content pages from article data @@ -122,51 +112,213 @@ function generateDataFromOpenAPI(specFile, dataOutPath, articleOutPath) { * Creates markdown files with frontmatter from article metadata. * Each article becomes a page with type: api that renders via Scalar. 
* - * @param articlesPath - Path to the articles data directory - * @param contentPath - Output path for generated content pages + * @param options - Generation options */ -function generatePagesFromArticleData(articlesPath, contentPath) { - const yaml = require('js-yaml'); - const articlesFile = path.join(articlesPath, 'articles.yml'); - if (!fs.existsSync(articlesFile)) { - console.warn(`⚠️ Articles file not found: ${articlesFile}`); - return; - } - // Read articles data - const articlesContent = fs.readFileSync(articlesFile, 'utf8'); - const data = yaml.load(articlesContent); - if (!data.articles || !Array.isArray(data.articles)) { - console.warn(`⚠️ No articles found in ${articlesFile}`); - return; - } - // Ensure content directory exists - if (!fs.existsSync(contentPath)) { - fs.mkdirSync(contentPath, { recursive: true }); - } - // Generate a page for each article - for (const article of data.articles) { - const pagePath = path.join(contentPath, article.path); - const pageFile = path.join(pagePath, '_index.md'); - // Create directory if needed - if (!fs.existsSync(pagePath)) { - fs.mkdirSync(pagePath, { recursive: true }); +function generatePagesFromArticleData(options) { + const { articlesPath, contentPath, menuKey, menuParent, productDescription, skipParentMenu, } = options; + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; } - // Generate frontmatter - const frontmatter = { - title: article.fields.name || article.path, - description: `API reference for ${article.fields.name || article.path}`, - type: 'api', - staticFilePath: article.fields.staticFilePath, - weight: 100, - }; - const pageContent = `--- + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent); + if (!data.articles || !Array.isArray(data.articles)) { + 
console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + // Determine the API parent directory from the first article's path + // e.g., if article path is "api/v1/health", the API root is "api" + const firstArticlePath = data.articles[0]?.path || ''; + const apiRootDir = firstArticlePath.split('/')[0]; + // Generate parent _index.md for the API section + if (apiRootDir) { + const apiParentDir = path.join(contentPath, apiRootDir); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); + } + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter = { + title: menuParent || 'HTTP API', + description: productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; + } + const parentContent = `--- +${yaml.dump(parentFrontmatter)}--- +`; + fs.writeFileSync(parentIndexFile, parentContent); + console.log(`✓ Generated parent index at ${parentIndexFile}`); + } + } + // Generate a page for each article + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + // Build frontmatter object + // Use menuName for display (actual endpoint path like /health) + // Fall back to name or path if menuName is not set + const displayName = article.fields.menuName || article.fields.name || article.path; + const frontmatter = { + title: displayName, + description: `API reference for ${displayName}`, + type: 
'api', + // Use explicit layout to override Hugo's default section template lookup + // (Hugo's section lookup ignores `type`, so we need `layout` for the 3-column API layout) + layout: 'list', + staticFilePath: article.fields.staticFilePath, + weight: 100, + }; + // Add menu entry if menuKey is provided + // Use menuName for menu display (shows actual endpoint path like /health) + if (menuKey) { + frontmatter.menu = { + [menuKey]: { + name: displayName, + ...(menuParent && { parent: menuParent }), + }, + }; + } + // Add related links if present in article fields + if (article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0) { + frontmatter.related = article.fields.related; + } + // Add OpenAPI tags if present in article fields (for frontmatter metadata) + if (article.fields.apiTags && + Array.isArray(article.fields.apiTags) && + article.fields.apiTags.length > 0) { + frontmatter.api_tags = article.fields.apiTags; + } + const pageContent = `--- ${yaml.dump(frontmatter)}--- `; - fs.writeFileSync(pageFile, pageContent); - } - console.log( - `✓ Generated ${data.articles.length} content pages in ${contentPath}` - ); + fs.writeFileSync(pageFile, pageContent); + } + console.log(`✓ Generated ${data.articles.length} content pages in ${contentPath}`); +} +/** + * Generate Hugo content pages from tag-based article data + * + * Creates markdown files with frontmatter from article metadata. + * Each article becomes a page with type: api that renders via RapiDoc. + * Includes operation metadata for TOC generation. 
+ * + * @param options - Generation options + */ +function generateTagPagesFromArticleData(options) { + const { articlesPath, contentPath, menuKey, menuParent, productDescription, skipParentMenu, } = options; + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; + } + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent); + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + // Generate parent _index.md for the API section + const apiParentDir = path.join(contentPath, 'api'); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); + } + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter = { + title: menuParent || 'HTTP API', + description: productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; + } + const parentContent = `--- +${yaml.dump(parentFrontmatter)}--- +`; + fs.writeFileSync(parentIndexFile, parentContent); + console.log(`✓ Generated parent index at ${parentIndexFile}`); + } + // Generate a page for each article (tag) + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + 
// Build frontmatter object + const title = article.fields.title || article.fields.name || article.path; + const isConceptual = article.fields.isConceptual === true; + const frontmatter = { + title, + description: article.fields.description || `API reference for ${title}`, + type: 'api', + layout: isConceptual ? 'single' : 'list', + staticFilePath: article.fields.staticFilePath, + weight: 100, + // Tag-based fields + tag: article.fields.tag, + isConceptual, + menuGroup: article.fields.menuGroup, + }; + // Add operations for TOC generation (only for non-conceptual pages) + if (!isConceptual && article.fields.operations && article.fields.operations.length > 0) { + frontmatter.operations = article.fields.operations; + } + // Add tag description for conceptual pages + if (isConceptual && article.fields.tagDescription) { + frontmatter.tagDescription = article.fields.tagDescription; + } + // Note: We deliberately don't add menu entries for tag-based API pages. + // The API sidebar navigation (api/sidebar-nav.html) handles navigation + // for API reference pages, avoiding conflicts with existing menu items + // like "Query data" and "Write data" that exist in the main sidebar. 
+ // Add related links if present in article fields + if (article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0) { + frontmatter.related = article.fields.related; + } + const pageContent = `--- +${yaml.dump(frontmatter)}--- +`; + fs.writeFileSync(pageFile, pageContent); + } + console.log(`✓ Generated ${data.articles.length} tag-based content pages in ${contentPath}`); } /** * Product configurations for all InfluxDB editions @@ -174,53 +326,61 @@ ${yaml.dump(frontmatter)}--- * Maps product identifiers to their OpenAPI specs and content directories */ const productConfigs = { - 'cloud-v2': { - specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api/v2'), - description: 'InfluxDB Cloud (v2 API)', - }, - 'oss-v2': { - specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api/v2'), - description: 'InfluxDB OSS v2', - }, - 'influxdb3-core': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core/reference/api'), - description: 'InfluxDB 3 Core', - }, - 'influxdb3-enterprise': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), - pagesDir: path.join( - DOCS_ROOT, - 'content/influxdb3/enterprise/reference/api' - ), - description: 'InfluxDB 3 Enterprise', - }, - 'cloud-dedicated': { - specFile: path.join( - API_DOCS_ROOT, - 'influxdb3/cloud-dedicated/management/openapi.yml' - ), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), - description: 'InfluxDB Cloud Dedicated', - }, - 'cloud-serverless': { - specFile: path.join( - API_DOCS_ROOT, - 'influxdb3/cloud-serverless/management/openapi.yml' - ), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), - description: 'InfluxDB Cloud Serverless', - }, - clustered: { - specFile: path.join( - API_DOCS_ROOT, - 
'influxdb3/clustered/management/openapi.yml' - ), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), - description: 'InfluxDB Clustered', - }, + // TODO: v2 products (cloud-v2, oss-v2) are disabled for now because they + // have existing Redoc-based API reference at /reference/api/ + // Uncomment when ready to migrate v2 products to Scalar + // 'cloud-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api'), + // description: 'InfluxDB Cloud (v2 API)', + // menuKey: 'influxdb_cloud', + // }, + // 'oss-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api'), + // description: 'InfluxDB OSS v2', + // menuKey: 'influxdb_v2', + // }, + // InfluxDB 3 products use tag-based generation for better UX + 'influxdb3-core': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core'), + description: 'InfluxDB 3 Core', + menuKey: 'influxdb3_core', + useTagBasedGeneration: true, + }, + 'influxdb3-enterprise': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/enterprise'), + description: 'InfluxDB 3 Enterprise', + menuKey: 'influxdb3_enterprise', + useTagBasedGeneration: true, + }, + // Note: Cloud Dedicated, Serverless, and Clustered use management APIs + // with paths like /accounts/{accountId}/... so we put them under /api/ + // These products have existing /reference/api/ pages with menu entries, + // so we skip adding menu entries to the generated parent pages. 
+ 'cloud-dedicated': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/cloud-dedicated/management/openapi.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), + description: 'InfluxDB Cloud Dedicated', + menuKey: 'influxdb3_cloud_dedicated', + skipParentMenu: true, + }, + 'cloud-serverless': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/cloud-serverless/management/openapi.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), + description: 'InfluxDB Cloud Serverless', + menuKey: 'influxdb3_cloud_serverless', + skipParentMenu: true, + }, + clustered: { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/clustered/management/openapi.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), + description: 'InfluxDB Clustered', + menuKey: 'influxdb3_clustered', + skipParentMenu: true, + }, }; exports.productConfigs = productConfigs; /** @@ -230,114 +390,127 @@ exports.productConfigs = productConfigs; * @param config - Product configuration */ function processProduct(productKey, config) { - console.log('\n' + '='.repeat(80)); - console.log(`Processing ${config.description || productKey}`); - console.log('='.repeat(80)); - const staticPath = path.join(DOCS_ROOT, 'static/openapi'); - const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); - const staticJsonSpecPath = path.join( - staticPath, - `influxdb-${productKey}.json` - ); - const staticPathsPath = path.join(staticPath, `influxdb-${productKey}/paths`); - const articlesPath = path.join( - DOCS_ROOT, - `data/article-data/influxdb/${productKey}` - ); - // Check if spec file exists - if (!fs.existsSync(config.specFile)) { - console.warn(`⚠️ Spec file not found: ${config.specFile}`); - console.log('Skipping this product. 
Run getswagger.sh first if needed.\n'); - return; - } - try { - // Step 1: Execute the getswagger.sh script to fetch/bundle the spec - const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); - if (fs.existsSync(getswaggerScript)) { - execCommand( - `${getswaggerScript} ${productKey} -B`, - `Fetching OpenAPI spec for ${productKey}` - ); - } else { - console.log(`⚠️ getswagger.sh not found, skipping fetch step`); + console.log('\n' + '='.repeat(80)); + console.log(`Processing ${config.description || productKey}`); + console.log('='.repeat(80)); + const staticPath = path.join(DOCS_ROOT, 'static/openapi'); + const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); + const staticJsonSpecPath = path.join(staticPath, `influxdb-${productKey}.json`); + const staticPathsPath = path.join(staticPath, `influxdb-${productKey}/paths`); + const articlesPath = path.join(DOCS_ROOT, `data/article-data/influxdb/${productKey}`); + // Check if spec file exists + if (!fs.existsSync(config.specFile)) { + console.warn(`⚠️ Spec file not found: ${config.specFile}`); + console.log('Skipping this product. 
Run getswagger.sh first if needed.\n'); + return; } - // Step 2: Ensure static directory exists - if (!fs.existsSync(staticPath)) { - fs.mkdirSync(staticPath, { recursive: true }); + try { + // Step 1: Execute the getswagger.sh script to fetch/bundle the spec + // Note: getswagger.sh must run from api-docs/ because it uses relative paths + const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); + if (fs.existsSync(getswaggerScript)) { + execCommand(`cd ${API_DOCS_ROOT} && ./getswagger.sh ${productKey} -B`, `Fetching OpenAPI spec for ${productKey}`); + } + else { + console.log(`⚠️ getswagger.sh not found, skipping fetch step`); + } + // Step 2: Ensure static directory exists + if (!fs.existsSync(staticPath)) { + fs.mkdirSync(staticPath, { recursive: true }); + } + // Step 3: Copy the generated OpenAPI spec to static folder (YAML) + if (fs.existsSync(config.specFile)) { + fs.copyFileSync(config.specFile, staticSpecPath); + console.log(`✓ Copied spec to ${staticSpecPath}`); + // Step 4: Generate JSON version of the spec + try { + const yaml = require('js-yaml'); + const specContent = fs.readFileSync(config.specFile, 'utf8'); + const specObject = yaml.load(specContent); + fs.writeFileSync(staticJsonSpecPath, JSON.stringify(specObject, null, 2)); + console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); + } + catch (jsonError) { + console.warn(`⚠️ Could not generate JSON spec: ${jsonError}`); + } + } + // Step 5: Generate Hugo data from OpenAPI spec + if (config.useTagBasedGeneration) { + // Tag-based generation: group operations by OpenAPI tag + const staticTagsPath = path.join(staticPath, `influxdb-${productKey}/tags`); + console.log(`\n📋 Using tag-based generation for ${productKey}...`); + openapiPathsToHugo.generateHugoDataByTag({ + specFile: config.specFile, + dataOutPath: staticTagsPath, + articleOutPath: articlesPath, + includePaths: true, // Also generate path-based files for backwards compatibility + }); + // Step 6: Generate Hugo content 
pages from tag-based article data + generateTagPagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); + } + else { + // Path-based generation: group paths by URL prefix (legacy) + generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); + // Step 6: Generate Hugo content pages from path-based article data + generatePagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); + } + console.log(`\n✅ Successfully processed ${config.description || productKey}\n`); } - // Step 3: Copy the generated OpenAPI spec to static folder (YAML) - if (fs.existsSync(config.specFile)) { - fs.copyFileSync(config.specFile, staticSpecPath); - console.log(`✓ Copied spec to ${staticSpecPath}`); - // Step 4: Generate JSON version of the spec - try { - const yaml = require('js-yaml'); - const specContent = fs.readFileSync(config.specFile, 'utf8'); - const specObject = yaml.load(specContent); - fs.writeFileSync( - staticJsonSpecPath, - JSON.stringify(specObject, null, 2) - ); - console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); - } catch (jsonError) { - console.warn(`⚠️ Could not generate JSON spec: ${jsonError}`); - } + catch (error) { + console.error(`\n❌ Error processing ${productKey}:`, error); + process.exit(1); } - // Step 5: Generate Hugo data from OpenAPI spec (path fragments for AI agents) - generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); - // Step 6: Generate Hugo content pages from article data - generatePagesFromArticleData(articlesPath, config.pagesDir); - console.log( - `\n✅ Successfully processed ${config.description || productKey}\n` - ); - } catch (error) { - console.error(`\n❌ Error processing ${productKey}:`, error); - process.exit(1); - } } /** * Main execution function */ 
function main() { - const args = process.argv.slice(2); - // Determine which products to process - let productsToProcess; - if (args.length === 0) { - // No arguments: process all products - productsToProcess = Object.keys(productConfigs); - console.log('\n📋 Processing all products...\n'); - } else { - // Arguments provided: process only specified products - productsToProcess = args; - console.log( - `\n📋 Processing specified products: ${productsToProcess.join(', ')}\n` - ); - } - // Validate product keys - const invalidProducts = productsToProcess.filter( - (key) => !productConfigs[key] - ); - if (invalidProducts.length > 0) { - console.error( - `\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}` - ); - console.error('\nValid products:'); - Object.keys(productConfigs).forEach((key) => { - console.error(` - ${key}: ${productConfigs[key].description}`); + const args = process.argv.slice(2); + // Determine which products to process + let productsToProcess; + if (args.length === 0) { + // No arguments: process all products + productsToProcess = Object.keys(productConfigs); + console.log('\n📋 Processing all products...\n'); + } + else { + // Arguments provided: process only specified products + productsToProcess = args; + console.log(`\n📋 Processing specified products: ${productsToProcess.join(', ')}\n`); + } + // Validate product keys + const invalidProducts = productsToProcess.filter((key) => !productConfigs[key]); + if (invalidProducts.length > 0) { + console.error(`\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}`); + console.error('\nValid products:'); + Object.keys(productConfigs).forEach((key) => { + console.error(` - ${key}: ${productConfigs[key].description}`); + }); + process.exit(1); + } + // Process each product + productsToProcess.forEach((productKey) => { + const config = productConfigs[productKey]; + processProduct(productKey, config); }); - process.exit(1); - } - // Process each product - 
productsToProcess.forEach((productKey) => { - const config = productConfigs[productKey]; - processProduct(productKey, config); - }); - console.log('\n' + '='.repeat(80)); - console.log('✅ All products processed successfully!'); - console.log('='.repeat(80) + '\n'); + console.log('\n' + '='.repeat(80)); + console.log('✅ All products processed successfully!'); + console.log('='.repeat(80) + '\n'); } // Execute if run directly if (require.main === module) { - main(); + main(); } -//# sourceMappingURL=generate-openapi-articles.js.map +//# sourceMappingURL=generate-openapi-articles.js.map \ No newline at end of file diff --git a/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js index 363643bc6f..cd977cfce3 100644 --- a/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js +++ b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js @@ -1,4 +1,4 @@ -'use strict'; +"use strict"; /** * OpenAPI to Hugo Data Converter * @@ -7,55 +7,45 @@ * * @module openapi-paths-to-hugo-data */ -var __createBinding = - (this && this.__createBinding) || - (Object.create - ? function (o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if ( - !desc || - ('get' in desc ? !m.__esModule : desc.writable || desc.configurable) - ) { - desc = { - enumerable: true, - get: function () { - return m[k]; - }, - }; - } - Object.defineProperty(o, k2, desc); - } - : function (o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); -var __setModuleDefault = - (this && this.__setModuleDefault) || - (Object.create - ? 
function (o, v) { - Object.defineProperty(o, 'default', { enumerable: true, value: v }); - } - : function (o, v) { - o['default'] = v; - }); -var __importStar = - (this && this.__importStar) || - function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) - for (var k in mod) - if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; -Object.defineProperty(exports, '__esModule', { value: true }); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.generateHugoDataByTag = generateHugoDataByTag; exports.generateHugoData = generateHugoData; -const yaml = __importStar(require('js-yaml')); -const fs = __importStar(require('fs')); -const path = __importStar(require('path')); +const yaml = __importStar(require("js-yaml")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); /** * Read a YAML file and parse it * @@ -64,8 +54,8 @@ const path = __importStar(require('path')); * @returns Parsed YAML content */ function readFile(filepath, encoding = 'utf8') { - const content = fs.readFileSync(filepath, encoding); - return yaml.load(content); + const content = fs.readFileSync(filepath, encoding); + return yaml.load(content); } /** * Write data to a YAML file @@ -74,7 +64,7 @@ function readFile(filepath, encoding = 'utf8') { * @param outputTo - Output file path */ function writeDataFile(data, outputTo) { - fs.writeFileSync(outputTo, yaml.dump(data)); + fs.writeFileSync(outputTo, yaml.dump(data)); } /** * Write data to a JSON file @@ -83,23 +73,196 @@ function writeDataFile(data, outputTo) { * @param outputTo - Output file path */ function writeJsonFile(data, outputTo) { - fs.writeFileSync(outputTo, 
JSON.stringify(data, null, 2)); + fs.writeFileSync(outputTo, JSON.stringify(data, null, 2)); } /** * OpenAPI utility functions */ const openapiUtils = { - /** - * Check if a path fragment is a placeholder (e.g., {id}) - * - * @param str - Path fragment to check - * @returns True if the fragment is a placeholder - */ - isPlaceholderFragment(str) { - const placeholderRegex = /^\{.*\}$/; - return placeholderRegex.test(str); - }, + /** + * Check if a path fragment is a placeholder (e.g., {id}) + * + * @param str - Path fragment to check + * @returns True if the fragment is a placeholder + */ + isPlaceholderFragment(str) { + const placeholderRegex = /^\{.*\}$/; + return placeholderRegex.test(str); + }, }; +/** + * Convert tag name to URL-friendly slug + * + * @param tagName - Tag name (e.g., "Write data", "Processing engine") + * @returns URL-friendly slug (e.g., "write-data", "processing-engine") + */ +function slugifyTag(tagName) { + return tagName + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, ''); +} +/** + * Menu group mappings for tag-based navigation + * Maps OpenAPI tags to sidebar groups + */ +const TAG_MENU_GROUPS = { + // Concepts group + 'Quick start': 'Concepts', + 'Authentication': 'Concepts', + 'Headers and parameters': 'Concepts', + 'Response codes': 'Concepts', + // Data Operations group + 'Write data': 'Data Operations', + 'Query data': 'Data Operations', + 'Cache data': 'Data Operations', + // Administration group + 'Database': 'Administration', + 'Table': 'Administration', + 'Token': 'Administration', + // Processing Engine group + 'Processing engine': 'Processing Engine', + // Server group + 'Server information': 'Server', + // Compatibility group + 'Compatibility endpoints': 'Compatibility', +}; +/** + * Get menu group for a tag + * + * @param tagName - Tag name + * @returns Menu group name or 'Other' if not mapped + */ +function getMenuGroupForTag(tagName) { + return TAG_MENU_GROUPS[tagName] || 'Other'; +} +/** + * HTTP 
methods to check for operations + */ +const HTTP_METHODS = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', +]; +/** + * Extract all operations from an OpenAPI document grouped by tag + * + * @param openapi - OpenAPI document + * @returns Map of tag name to operations with that tag + */ +function extractOperationsByTag(openapi) { + const tagOperations = new Map(); + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + const opMeta = { + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }; + // Add operation to each of its tags + (operation.tags || []).forEach((tag) => { + if (!tagOperations.has(tag)) { + tagOperations.set(tag, []); + } + tagOperations.get(tag).push(opMeta); + }); + } + }); + }); + return tagOperations; +} +/** + * Write OpenAPI specs grouped by tag to separate files + * Generates both YAML and JSON versions per tag + * + * @param openapi - OpenAPI document + * @param prefix - Filename prefix for output files + * @param outPath - Output directory path + */ +function writeTagOpenapis(openapi, prefix, outPath) { + const tagOperations = extractOperationsByTag(openapi); + // Process each tag + tagOperations.forEach((operations, tagName) => { + // Deep copy openapi + const doc = JSON.parse(JSON.stringify(openapi)); + // Filter paths to only include those with operations for this tag + const filteredPaths = {}; + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + const filteredPathItem = {}; + let hasOperations = false; + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation?.tags?.includes(tagName)) { + filteredPathItem[method] = operation; + hasOperations = true; + } + }); + // Include path-level parameters if we have operations + if 
(hasOperations) { + if (pathItem.parameters) { + filteredPathItem.parameters = pathItem.parameters; + } + filteredPaths[pathKey] = filteredPathItem; + } + }); + doc.paths = filteredPaths; + // Filter tags to only include this tag (and trait tags for context) + if (doc.tags) { + doc.tags = doc.tags.filter((tag) => tag.name === tagName || tag['x-traitTag']); + } + // Update info + const tagSlug = slugifyTag(tagName); + doc.info.title = tagName; + doc.info.description = `API reference for ${tagName}`; + doc['x-tagGroup'] = tagName; + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated tag spec: ${baseFilename}.yaml (${Object.keys(filteredPaths).length} paths, ${operations.length} operations)`); + } + catch (err) { + console.error(`Error writing tag group ${tagName}:`, err); + } + }); + // Also create specs for conceptual tags (x-traitTag) without operations + (openapi.tags || []).forEach((tag) => { + if (tag['x-traitTag'] && !tagOperations.has(tag.name)) { + const doc = JSON.parse(JSON.stringify(openapi)); + doc.paths = {}; + doc.tags = [tag]; + doc.info.title = tag.name; + doc.info.description = tag.description || `API reference for ${tag.name}`; + doc['x-tagGroup'] = tag.name; + const tagSlug = slugifyTag(tag.name); + try { + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated conceptual tag spec: ${baseFilename}.yaml`); + } + catch (err) { + console.error(`Error writing conceptual tag ${tag.name}:`, err); + } + } + }); +} /** * Write OpenAPI specs grouped by 
path to separate files * Generates both YAML and JSON versions @@ -109,51 +272,79 @@ const openapiUtils = { * @param outPath - Output directory path */ function writePathOpenapis(openapi, prefix, outPath) { - const pathGroups = {}; - // Group paths by their base path (first 3-4 segments, excluding placeholders) - Object.keys(openapi.paths) - .sort() - .forEach((p) => { - const delimiter = '/'; - let key = p.split(delimiter); - // Check if this is an item path (ends with a placeholder) - let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); - if (isItemPath) { - key = key.slice(0, -1); - } - // Take first 4 segments - key = key.slice(0, 4); - // Check if the last segment is still a placeholder - isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); - if (isItemPath) { - key = key.slice(0, -1); - } - const groupKey = key.join('/'); - pathGroups[groupKey] = pathGroups[groupKey] || {}; - pathGroups[groupKey][p] = openapi.paths[p]; + const pathGroups = {}; + // Group paths by their base path (first 3-4 segments, excluding placeholders) + Object.keys(openapi.paths) + .sort() + .forEach((p) => { + const delimiter = '/'; + let key = p.split(delimiter); + // Check if this is an item path (ends with a placeholder) + let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + // Take first 4 segments + key = key.slice(0, 4); + // Check if the last segment is still a placeholder + isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + const groupKey = key.join('/'); + pathGroups[groupKey] = pathGroups[groupKey] || {}; + pathGroups[groupKey][p] = openapi.paths[p]; + }); + // Write each path group to separate YAML and JSON files + Object.keys(pathGroups).forEach((pg) => { + // Deep copy openapi + const doc = JSON.parse(JSON.stringify(openapi)); + doc.paths = pathGroups[pg]; + // Collect tags used by operations in 
this path group + const usedTags = new Set(); + Object.values(doc.paths).forEach((pathItem) => { + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + httpMethods.forEach((method) => { + const operation = pathItem[method]; + if (operation?.tags) { + operation.tags.forEach((tag) => usedTags.add(tag)); + } + }); + }); + // Filter tags to only include those used by operations in this path group + // Exclude x-traitTag tags (supplementary documentation tags) + if (doc.tags) { + doc.tags = doc.tags.filter((tag) => usedTags.has(tag.name) && !tag['x-traitTag']); + } + // Simplify info for path-specific docs + doc.info.title = pg; + doc.info.description = `API reference for ${pg}`; + doc['x-pathGroup'] = pg; + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + // Write both YAML and JSON versions + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); + } + catch (err) { + console.error(`Error writing path group ${pg}:`, err); + } }); - // Write each path group to separate YAML and JSON files - Object.keys(pathGroups).forEach((pg) => { - // Deep copy openapi - const doc = JSON.parse(JSON.stringify(openapi)); - doc.paths = pathGroups[pg]; - doc.info.title = `${pg}\n${doc.info.title}`; - doc['x-pathGroup'] = pg; - try { - if (!fs.existsSync(outPath)) { - fs.mkdirSync(outPath, { recursive: true }); - } - const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; - const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); - const jsonPath = path.resolve(outPath, `${baseFilename}.json`); - // Write both YAML and JSON versions - writeDataFile(doc, yamlPath); - 
writeJsonFile(doc, jsonPath); - console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); - } catch (err) { - console.error(`Error writing path group ${pg}:`, err); - } - }); } /** * Create article metadata for a path group @@ -162,33 +353,107 @@ function writePathOpenapis(openapi, prefix, outPath) { * @returns Article metadata object */ function createArticleDataForPathGroup(openapi) { - const article = { - path: '', - fields: { - name: openapi['x-pathGroup'] || '', - describes: Object.keys(openapi.paths), - }, - }; - /** - * Convert path to snake case for article path - * - * @param p - Path to convert - * @returns Snake-cased path - */ - const snakifyPath = (p) => { - if (!p) { - return ''; + const article = { + path: '', + fields: { + name: openapi['x-pathGroup'] || '', + describes: Object.keys(openapi.paths), + }, + }; + /** + * Convert OpenAPI path to Hugo-friendly article path + * Legacy endpoints (without /api/ prefix) go under api/ directly + * Versioned endpoints (with /api/vN/) keep their structure + * + * @param p - Path to convert (e.g., '/health', '/api/v3/query_sql') + * @returns Path suitable for Hugo content directory (e.g., 'api/health', 'api/v3/query_sql') + */ + const toHugoPath = (p) => { + if (!p) { + return ''; + } + // If path doesn't start with /api/, it's a legacy endpoint + // Place it directly under api/ to avoid collision with /api/v1/* paths + if (!p.startsWith('/api/')) { + // /health -> api/health + // /write -> api/write + return `api${p}`; + } + // /api/v1/health -> api/v1/health + // /api/v2/write -> api/v2/write + // /api/v3/query_sql -> api/v3/query_sql + return p.replace(/^\//, ''); + }; + /** + * Convert path to tag-friendly format (dashes instead of slashes) + * + * @param p - Path to convert + * @returns Tag-friendly path + */ + const toTagPath = (p) => { + if (!p) { + return ''; + } + return p.replace(/^\//, '').replaceAll('/', '-'); + }; + const pathGroup = openapi['x-pathGroup'] || ''; + article.path = 
toHugoPath(pathGroup); + // Store original path for menu display (shows actual endpoint path) + article.fields.menuName = pathGroup; + article.fields.title = openapi.info?.title; + article.fields.description = openapi.description; + const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); + article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] + .filter(Boolean) + .map((t) => toTagPath(t)); + // Extract x-relatedLinks and OpenAPI tags from path items or operations + const relatedLinks = []; + const apiTags = []; + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + Object.values(openapi.paths).forEach((pathItem) => { + // Check path-level x-relatedLinks + if (pathItem['x-relatedLinks'] && + Array.isArray(pathItem['x-relatedLinks'])) { + relatedLinks.push(...pathItem['x-relatedLinks'].filter((link) => !relatedLinks.includes(link))); + } + // Check operation-level x-relatedLinks and tags + httpMethods.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + // Extract x-relatedLinks + if (operation['x-relatedLinks'] && + Array.isArray(operation['x-relatedLinks'])) { + relatedLinks.push(...operation['x-relatedLinks'].filter((link) => !relatedLinks.includes(link))); + } + // Extract OpenAPI tags from operation + if (operation.tags && Array.isArray(operation.tags)) { + operation.tags.forEach((tag) => { + if (!apiTags.includes(tag)) { + apiTags.push(tag); + } + }); + } + } + }); + }); + // Only add related if there are links + if (relatedLinks.length > 0) { + article.fields.related = relatedLinks; + } + // Add OpenAPI tags from operations (for Hugo frontmatter) + if (apiTags.length > 0) { + article.fields.apiTags = apiTags; } - return p.replace(/^\//, '').replaceAll('/', '-'); - }; - article.path = snakifyPath(openapi['x-pathGroup'] || ''); - article.fields.title = openapi.info?.title; - article.fields.description = openapi.description; - const pathGroupFrags = 
path.parse(openapi['x-pathGroup'] || ''); - article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] - .filter(Boolean) - .map((t) => snakifyPath(t)); - return article; + return article; } /** * Write OpenAPI article metadata to Hugo data files @@ -199,50 +464,187 @@ function createArticleDataForPathGroup(openapi) { * @param opts - Options including file pattern filter */ function writeOpenapiArticleData(sourcePath, targetPath, opts) { - /** - * Check if path is a file - */ - const isFile = (filePath) => { - return fs.lstatSync(filePath).isFile(); - }; - /** - * Check if filename matches pattern - */ - const matchesPattern = (filePath) => { - return opts.filePattern - ? path.parse(filePath).name.startsWith(opts.filePattern) - : true; - }; - try { - const articles = fs - .readdirSync(sourcePath) - .map((fileName) => path.join(sourcePath, fileName)) - .filter(matchesPattern) - .filter(isFile) - .filter( - (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') - ) // Only process YAML files - .map((filePath) => { - const openapi = readFile(filePath); - const article = createArticleDataForPathGroup(openapi); - article.fields.source = filePath; - // Hugo omits "/static" from the URI when serving files stored in "./static" - article.fields.staticFilePath = filePath.replace(/^static\//, '/'); - return article; - }); - if (!fs.existsSync(targetPath)) { - fs.mkdirSync(targetPath, { recursive: true }); + /** + * Check if path is a file + */ + const isFile = (filePath) => { + return fs.lstatSync(filePath).isFile(); + }; + /** + * Check if filename matches pattern + */ + const matchesPattern = (filePath) => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter((filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml')) // Only process YAML files + .map((filePath) => { + const openapi = readFile(filePath); + const article = createArticleDataForPathGroup(openapi); + article.fields.source = filePath; + // Hugo omits "/static" from the URI when serving files stored in "./static" + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); + } + const articleCollection = { articles }; + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + console.log(`Generated ${articles.length} articles in ${targetPath}`); } - const articleCollection = { articles }; - // Write both YAML and JSON versions - const yamlPath = path.resolve(targetPath, 'articles.yml'); - const jsonPath = path.resolve(targetPath, 'articles.json'); - writeDataFile(articleCollection, yamlPath); - writeJsonFile(articleCollection, jsonPath); - console.log(`Generated ${articles.length} articles in ${targetPath}`); - } catch (e) { - console.error('Error writing article data:', e); - } + catch (e) { + console.error('Error writing article data:', e); + } +} +/** + * Create article data for a tag-based grouping + * + * @param openapi - OpenAPI document with x-tagGroup + * @param operations - Operations for this tag + * @param tagMeta - Tag metadata from OpenAPI spec + * @returns Article metadata object + */ +function createArticleDataForTag(openapi, operations, tagMeta) { + const tagName = openapi['x-tagGroup'] || ''; 
+ const tagSlug = slugifyTag(tagName); + const isConceptual = tagMeta?.['x-traitTag'] === true; + const article = { + path: `api/${tagSlug}`, + fields: { + name: tagName, + describes: Object.keys(openapi.paths), + title: tagName, + description: tagMeta?.description || openapi.info?.description || `API reference for ${tagName}`, + tag: tagName, + isConceptual, + menuGroup: getMenuGroupForTag(tagName), + operations: operations.map((op) => ({ + operationId: op.operationId, + method: op.method, + path: op.path, + summary: op.summary, + tags: op.tags, + })), + }, + }; + // Add tag description for conceptual pages + if (tagMeta?.description) { + article.fields.tagDescription = tagMeta.description; + } + return article; +} +/** + * Write tag-based OpenAPI article metadata to Hugo data files + * Generates articles.yml and articles.json + * + * @param sourcePath - Path to directory containing tag-based OpenAPI fragment files + * @param targetPath - Output path for article data + * @param openapi - Original OpenAPI document (for tag metadata) + * @param opts - Options including file pattern filter + */ +function writeOpenapiTagArticleData(sourcePath, targetPath, openapi, opts) { + const isFile = (filePath) => { + return fs.lstatSync(filePath).isFile(); + }; + const matchesPattern = (filePath) => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + // Create tag metadata lookup + const tagMetaMap = new Map(); + (openapi.tags || []).forEach((tag) => { + tagMetaMap.set(tag.name, tag); + }); + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter((filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml')) + .map((filePath) => { + const tagOpenapi = readFile(filePath); + const tagName = tagOpenapi['x-tagGroup'] || tagOpenapi.info?.title || ''; + const tagMeta = tagMetaMap.get(tagName); + // Extract operations from the tag-filtered spec + const operations = []; + Object.entries(tagOpenapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + operations.push({ + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }); + } + }); + }); + const article = createArticleDataForTag(tagOpenapi, operations, tagMeta); + article.fields.source = filePath; + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); + } + const articleCollection = { articles }; + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + console.log(`Generated ${articles.length} tag-based articles in ${targetPath}`); + } + catch (e) { + console.error('Error writing tag article data:', e); + } +} +/** + * Generate Hugo data files from an OpenAPI specification grouped by tag + * + * This function: + * 1. Reads the OpenAPI spec file + * 2. 
Groups operations by their OpenAPI tags + * 3. Writes each tag group to separate YAML and JSON files + * 4. Generates tag-based article metadata for Hugo + * + * @param options - Generation options + */ +function generateHugoDataByTag(options) { + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = readFile(options.specFile, 'utf8'); + // Optionally generate path-based files for backwards compatibility + if (options.includePaths) { + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + } + // Generate tag-based files + const tagOutPath = options.includePaths + ? path.join(options.dataOutPath, 'tags') + : options.dataOutPath; + console.log(`\nGenerating OpenAPI tag files in ${tagOutPath}....`); + writeTagOpenapis(sourceFile, filenamePrefix, tagOutPath); + console.log(`\nGenerating OpenAPI tag article data in ${options.articleOutPath}...`); + writeOpenapiTagArticleData(tagOutPath, options.articleOutPath, sourceFile, { + filePattern: filenamePrefix, + }); + console.log('\nTag-based generation complete!\n'); } /** * Generate Hugo data files from an OpenAPI specification @@ -256,20 +658,19 @@ function writeOpenapiArticleData(sourcePath, targetPath, opts) { * @param options - Generation options */ function generateHugoData(options) { - const filenamePrefix = `${path.parse(options.specFile).name}-`; - const sourceFile = readFile(options.specFile, 'utf8'); - console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); - writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); - console.log( - `\nGenerating OpenAPI article data in ${options.articleOutPath}...` - ); - writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, { - filePattern: filenamePrefix, - }); - console.log('\nGeneration complete!\n'); + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = 
readFile(options.specFile, 'utf8'); + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + console.log(`\nGenerating OpenAPI article data in ${options.articleOutPath}...`); + writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, { + filePattern: filenamePrefix, + }); + console.log('\nGeneration complete!\n'); } // CommonJS export for backward compatibility module.exports = { - generateHugoData, + generateHugoData, + generateHugoDataByTag, }; -//# sourceMappingURL=index.js.map +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/api-docs/scripts/generate-openapi-articles.ts b/api-docs/scripts/generate-openapi-articles.ts index c48c9e9a38..a0b83af027 100644 --- a/api-docs/scripts/generate-openapi-articles.ts +++ b/api-docs/scripts/generate-openapi-articles.ts @@ -27,6 +27,17 @@ import * as fs from 'fs'; // Import the OpenAPI to Hugo converter const openapiPathsToHugo = require('./openapi-paths-to-hugo-data/index.js'); +/** + * Operation metadata structure from tag-based articles + */ +interface OperationMeta { + operationId: string; + method: string; + path: string; + summary: string; + tags: string[]; +} + /** * Product configuration for API generation */ @@ -37,6 +48,12 @@ interface ProductConfig { pagesDir: string; /** Optional description of the product */ description?: string; + /** Hugo menu identifier for this product (e.g., 'influxdb3_core') */ + menuKey?: string; + /** Skip adding menu entry to generated parent page (use when existing reference page has menu entry) */ + skipParentMenu?: boolean; + /** Use tag-based generation instead of path-based (default: false) */ + useTagBasedGeneration?: boolean; } /** @@ -94,19 +111,41 @@ function generateDataFromOpenAPI( }); } +/** + * Options for generating pages from article data + */ +interface GeneratePagesOptions { + /** Path to the articles data directory */ + articlesPath: 
string; + /** Output path for generated content pages */ + contentPath: string; + /** Hugo menu identifier for navigation (e.g., 'influxdb3_core') */ + menuKey?: string; + /** Parent menu item name (e.g., 'InfluxDB HTTP API') */ + menuParent?: string; + /** Product description for the parent page */ + productDescription?: string; + /** Skip adding menu entry to generated parent page */ + skipParentMenu?: boolean; +} + /** * Generate Hugo content pages from article data * * Creates markdown files with frontmatter from article metadata. * Each article becomes a page with type: api that renders via Scalar. * - * @param articlesPath - Path to the articles data directory - * @param contentPath - Output path for generated content pages + * @param options - Generation options */ -function generatePagesFromArticleData( - articlesPath: string, - contentPath: string -): void { +function generatePagesFromArticleData(options: GeneratePagesOptions): void { + const { + articlesPath, + contentPath, + menuKey, + menuParent, + productDescription, + skipParentMenu, + } = options; const yaml = require('js-yaml'); const articlesFile = path.join(articlesPath, 'articles.yml'); @@ -118,7 +157,10 @@ function generatePagesFromArticleData( // Read articles data const articlesContent = fs.readFileSync(articlesFile, 'utf8'); const data = yaml.load(articlesContent) as { - articles: Array<{ path: string; fields: Record }>; + articles: Array<{ + path: string; + fields: Record; + }>; }; if (!data.articles || !Array.isArray(data.articles)) { @@ -131,6 +173,47 @@ function generatePagesFromArticleData( fs.mkdirSync(contentPath, { recursive: true }); } + // Determine the API parent directory from the first article's path + // e.g., if article path is "api/v1/health", the API root is "api" + const firstArticlePath = data.articles[0]?.path || ''; + const apiRootDir = firstArticlePath.split('/')[0]; + + // Generate parent _index.md for the API section + if (apiRootDir) { + const apiParentDir = 
path.join(contentPath, apiRootDir); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); + } + + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter: Record = { + title: menuParent || 'HTTP API', + description: + productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; + } + + const parentContent = `--- +${yaml.dump(parentFrontmatter)}--- +`; + + fs.writeFileSync(parentIndexFile, parentContent); + console.log(`✓ Generated parent index at ${parentIndexFile}`); + } + } + // Generate a page for each article for (const article of data.articles) { const pagePath = path.join(contentPath, article.path); @@ -141,15 +224,51 @@ function generatePagesFromArticleData( fs.mkdirSync(pagePath, { recursive: true }); } - // Generate frontmatter - const frontmatter = { - title: article.fields.name || article.path, - description: `API reference for ${article.fields.name || article.path}`, + // Build frontmatter object + // Use menuName for display (actual endpoint path like /health) + // Fall back to name or path if menuName is not set + const displayName = + article.fields.menuName || article.fields.name || article.path; + const frontmatter: Record = { + title: displayName, + description: `API reference for ${displayName}`, type: 'api', + // Use explicit layout to override Hugo's default section template lookup + // (Hugo's section lookup ignores `type`, so we need `layout` for the 3-column API layout) + layout: 'list', staticFilePath: article.fields.staticFilePath, weight: 100, }; + // Add menu entry if menuKey is provided + // Use menuName for menu display (shows actual endpoint path like /health) + if (menuKey) { + 
frontmatter.menu = { + [menuKey]: { + name: displayName, + ...(menuParent && { parent: menuParent }), + }, + }; + } + + // Add related links if present in article fields + if ( + article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0 + ) { + frontmatter.related = article.fields.related; + } + + // Add OpenAPI tags if present in article fields (for frontmatter metadata) + if ( + article.fields.apiTags && + Array.isArray(article.fields.apiTags) && + article.fields.apiTags.length > 0 + ) { + frontmatter.api_tags = article.fields.apiTags; + } + const pageContent = `--- ${yaml.dump(frontmatter)}--- `; @@ -162,35 +281,217 @@ ${yaml.dump(frontmatter)}--- ); } +/** + * Options for generating tag-based pages from article data + */ +interface GenerateTagPagesOptions { + /** Path to the articles data directory */ + articlesPath: string; + /** Output path for generated content pages */ + contentPath: string; + /** Hugo menu identifier for navigation (e.g., 'influxdb3_core') */ + menuKey?: string; + /** Parent menu item name (e.g., 'InfluxDB HTTP API') */ + menuParent?: string; + /** Product description for the parent page */ + productDescription?: string; + /** Skip adding menu entry to generated parent page */ + skipParentMenu?: boolean; +} + +/** + * Generate Hugo content pages from tag-based article data + * + * Creates markdown files with frontmatter from article metadata. + * Each article becomes a page with type: api that renders via RapiDoc. + * Includes operation metadata for TOC generation. 
+ * + * @param options - Generation options + */ +function generateTagPagesFromArticleData(options: GenerateTagPagesOptions): void { + const { + articlesPath, + contentPath, + menuKey, + menuParent, + productDescription, + skipParentMenu, + } = options; + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; + } + + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent) as { + articles: Array<{ + path: string; + fields: { + name?: string; + title?: string; + description?: string; + tag?: string; + isConceptual?: boolean; + tagDescription?: string; + menuGroup?: string; + staticFilePath?: string; + operations?: OperationMeta[]; + related?: string[]; + }; + }>; + }; + + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + + // Generate parent _index.md for the API section + const apiParentDir = path.join(contentPath, 'api'); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); + } + + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter: Record = { + title: menuParent || 'HTTP API', + description: + productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; + } + + const parentContent = `--- +${yaml.dump(parentFrontmatter)}--- +`; + + fs.writeFileSync(parentIndexFile, parentContent); + 
console.log(`✓ Generated parent index at ${parentIndexFile}`); + } + + // Generate a page for each article (tag) + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + + // Build frontmatter object + const title = article.fields.title || article.fields.name || article.path; + const isConceptual = article.fields.isConceptual === true; + + const frontmatter: Record = { + title, + description: article.fields.description || `API reference for ${title}`, + type: 'api', + layout: isConceptual ? 'single' : 'list', + staticFilePath: article.fields.staticFilePath, + weight: 100, + // Tag-based fields + tag: article.fields.tag, + isConceptual, + menuGroup: article.fields.menuGroup, + }; + + // Add operations for TOC generation (only for non-conceptual pages) + if (!isConceptual && article.fields.operations && article.fields.operations.length > 0) { + frontmatter.operations = article.fields.operations; + } + + // Add tag description for conceptual pages + if (isConceptual && article.fields.tagDescription) { + frontmatter.tagDescription = article.fields.tagDescription; + } + + // Note: We deliberately don't add menu entries for tag-based API pages. + // The API sidebar navigation (api/sidebar-nav.html) handles navigation + // for API reference pages, avoiding conflicts with existing menu items + // like "Query data" and "Write data" that exist in the main sidebar. 
+ + // Add related links if present in article fields + if ( + article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0 + ) { + frontmatter.related = article.fields.related; + } + + const pageContent = `--- +${yaml.dump(frontmatter)}--- +`; + + fs.writeFileSync(pageFile, pageContent); + } + + console.log( + `✓ Generated ${data.articles.length} tag-based content pages in ${contentPath}` + ); +} + /** * Product configurations for all InfluxDB editions * * Maps product identifiers to their OpenAPI specs and content directories */ const productConfigs: ProductConfigMap = { - 'cloud-v2': { - specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api/v2'), - description: 'InfluxDB Cloud (v2 API)', - }, - 'oss-v2': { - specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api/v2'), - description: 'InfluxDB OSS v2', - }, + // TODO: v2 products (cloud-v2, oss-v2) are disabled for now because they + // have existing Redoc-based API reference at /reference/api/ + // Uncomment when ready to migrate v2 products to Scalar + // 'cloud-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api'), + // description: 'InfluxDB Cloud (v2 API)', + // menuKey: 'influxdb_cloud', + // }, + // 'oss-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api'), + // description: 'InfluxDB OSS v2', + // menuKey: 'influxdb_v2', + // }, + // InfluxDB 3 products use tag-based generation for better UX 'influxdb3-core': { specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core/reference/api'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core'), description: 'InfluxDB 3 Core', + menuKey: 
'influxdb3_core', + useTagBasedGeneration: true, }, 'influxdb3-enterprise': { specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), - pagesDir: path.join( - DOCS_ROOT, - 'content/influxdb3/enterprise/reference/api' - ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/enterprise'), description: 'InfluxDB 3 Enterprise', + menuKey: 'influxdb3_enterprise', + useTagBasedGeneration: true, }, + // Note: Cloud Dedicated, Serverless, and Clustered use management APIs + // with paths like /accounts/{accountId}/... so we put them under /api/ + // These products have existing /reference/api/ pages with menu entries, + // so we skip adding menu entries to the generated parent pages. 'cloud-dedicated': { specFile: path.join( API_DOCS_ROOT, @@ -198,6 +499,8 @@ const productConfigs: ProductConfigMap = { ), pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), description: 'InfluxDB Cloud Dedicated', + menuKey: 'influxdb3_cloud_dedicated', + skipParentMenu: true, }, 'cloud-serverless': { specFile: path.join( @@ -206,6 +509,8 @@ const productConfigs: ProductConfigMap = { ), pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), description: 'InfluxDB Cloud Serverless', + menuKey: 'influxdb3_cloud_serverless', + skipParentMenu: true, }, clustered: { specFile: path.join( @@ -214,6 +519,8 @@ const productConfigs: ProductConfigMap = { ), pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), description: 'InfluxDB Clustered', + menuKey: 'influxdb3_clustered', + skipParentMenu: true, }, }; @@ -249,10 +556,11 @@ function processProduct(productKey: string, config: ProductConfig): void { try { // Step 1: Execute the getswagger.sh script to fetch/bundle the spec + // Note: getswagger.sh must run from api-docs/ because it uses relative paths const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); if (fs.existsSync(getswaggerScript)) { execCommand( - `${getswaggerScript} ${productKey} -B`, + `cd ${API_DOCS_ROOT} 
&& ./getswagger.sh ${productKey} -B`, `Fetching OpenAPI spec for ${productKey}` ); } else { @@ -284,11 +592,39 @@ function processProduct(productKey: string, config: ProductConfig): void { } } - // Step 5: Generate Hugo data from OpenAPI spec (path fragments for AI agents) - generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); - - // Step 6: Generate Hugo content pages from article data - generatePagesFromArticleData(articlesPath, config.pagesDir); + // Step 5: Generate Hugo data from OpenAPI spec + if (config.useTagBasedGeneration) { + // Tag-based generation: group operations by OpenAPI tag + const staticTagsPath = path.join(staticPath, `influxdb-${productKey}/tags`); + console.log(`\n📋 Using tag-based generation for ${productKey}...`); + openapiPathsToHugo.generateHugoDataByTag({ + specFile: config.specFile, + dataOutPath: staticTagsPath, + articleOutPath: articlesPath, + includePaths: true, // Also generate path-based files for backwards compatibility + }); + + // Step 6: Generate Hugo content pages from tag-based article data + generateTagPagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); + } else { + // Path-based generation: group paths by URL prefix (legacy) + generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); + + // Step 6: Generate Hugo content pages from path-based article data + generatePagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); + } console.log( `\n✅ Successfully processed ${config.description || productKey}\n` diff --git a/api-docs/scripts/openapi-paths-to-hugo-data/index.ts b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts index c634101bef..3d0d0f27ff 100644 --- a/api-docs/scripts/openapi-paths-to-hugo-data/index.ts +++ 
b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts @@ -211,9 +211,22 @@ interface Tag { name: string; description?: string; externalDocs?: ExternalDocs; + /** Indicates this is a conceptual/supplementary tag (no operations) */ + 'x-traitTag'?: boolean; [key: string]: unknown; } +/** + * Operation metadata for TOC generation + */ +interface OperationMeta { + operationId: string; + method: string; + path: string; + summary: string; + tags: string[]; +} + /** * OpenAPI external docs object */ @@ -236,6 +249,22 @@ interface Article { tags?: string[]; source?: string; staticFilePath?: string; + /** Related documentation links extracted from x-relatedLinks */ + related?: string[]; + /** OpenAPI tags from operations (for Hugo frontmatter) */ + apiTags?: string[]; + /** Menu display name (actual endpoint path, different from Hugo path) */ + menuName?: string; + /** OpenAPI tag name (for tag-based articles) */ + tag?: string; + /** Whether this is a conceptual tag (x-traitTag) */ + isConceptual?: boolean; + /** Tag description from OpenAPI spec */ + tagDescription?: string; + /** Sidebar navigation group */ + menuGroup?: string; + /** Operations metadata for TOC generation */ + operations?: OperationMeta[]; }; } @@ -317,6 +346,212 @@ const openapiUtils = { }, }; +/** + * Convert tag name to URL-friendly slug + * + * @param tagName - Tag name (e.g., "Write data", "Processing engine") + * @returns URL-friendly slug (e.g., "write-data", "processing-engine") + */ +function slugifyTag(tagName: string): string { + return tagName + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, ''); +} + +/** + * Menu group mappings for tag-based navigation + * Maps OpenAPI tags to sidebar groups + */ +const TAG_MENU_GROUPS: Record = { + // Concepts group + 'Quick start': 'Concepts', + 'Authentication': 'Concepts', + 'Headers and parameters': 'Concepts', + 'Response codes': 'Concepts', + // Data Operations group + 'Write data': 'Data Operations', + 'Query data': 'Data 
Operations', + 'Cache data': 'Data Operations', + // Administration group + 'Database': 'Administration', + 'Table': 'Administration', + 'Token': 'Administration', + // Processing Engine group + 'Processing engine': 'Processing Engine', + // Server group + 'Server information': 'Server', + // Compatibility group + 'Compatibility endpoints': 'Compatibility', +}; + +/** + * Get menu group for a tag + * + * @param tagName - Tag name + * @returns Menu group name or 'Other' if not mapped + */ +function getMenuGroupForTag(tagName: string): string { + return TAG_MENU_GROUPS[tagName] || 'Other'; +} + +/** + * HTTP methods to check for operations + */ +const HTTP_METHODS = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', +] as const; + +/** + * Extract all operations from an OpenAPI document grouped by tag + * + * @param openapi - OpenAPI document + * @returns Map of tag name to operations with that tag + */ +function extractOperationsByTag( + openapi: OpenAPIDocument +): Map { + const tagOperations = new Map(); + + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method] as Operation | undefined; + if (operation) { + const opMeta: OperationMeta = { + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }; + + // Add operation to each of its tags + (operation.tags || []).forEach((tag) => { + if (!tagOperations.has(tag)) { + tagOperations.set(tag, []); + } + tagOperations.get(tag)!.push(opMeta); + }); + } + }); + }); + + return tagOperations; +} + +/** + * Write OpenAPI specs grouped by tag to separate files + * Generates both YAML and JSON versions per tag + * + * @param openapi - OpenAPI document + * @param prefix - Filename prefix for output files + * @param outPath - Output directory path + */ +function writeTagOpenapis( + 
openapi: OpenAPIDocument, + prefix: string, + outPath: string +): void { + const tagOperations = extractOperationsByTag(openapi); + + // Process each tag + tagOperations.forEach((operations, tagName) => { + // Deep copy openapi + const doc: OpenAPIDocument = JSON.parse(JSON.stringify(openapi)); + + // Filter paths to only include those with operations for this tag + const filteredPaths: Record = {}; + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + const filteredPathItem: PathItem = {}; + let hasOperations = false; + + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method] as Operation | undefined; + if (operation?.tags?.includes(tagName)) { + filteredPathItem[method] = operation; + hasOperations = true; + } + }); + + // Include path-level parameters if we have operations + if (hasOperations) { + if (pathItem.parameters) { + filteredPathItem.parameters = pathItem.parameters; + } + filteredPaths[pathKey] = filteredPathItem; + } + }); + + doc.paths = filteredPaths; + + // Filter tags to only include this tag (and trait tags for context) + if (doc.tags) { + doc.tags = doc.tags.filter( + (tag) => tag.name === tagName || tag['x-traitTag'] + ); + } + + // Update info + const tagSlug = slugifyTag(tagName); + doc.info.title = tagName; + doc.info.description = `API reference for ${tagName}`; + doc['x-tagGroup'] = tagName; + + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + + console.log( + `Generated tag spec: ${baseFilename}.yaml (${Object.keys(filteredPaths).length} paths, ${operations.length} operations)` + ); + } catch (err) { + console.error(`Error writing tag group ${tagName}:`, err); + } + }); + + // Also create specs for conceptual tags 
(x-traitTag) without operations + (openapi.tags || []).forEach((tag) => { + if (tag['x-traitTag'] && !tagOperations.has(tag.name)) { + const doc: OpenAPIDocument = JSON.parse(JSON.stringify(openapi)); + doc.paths = {}; + doc.tags = [tag]; + doc.info.title = tag.name; + doc.info.description = tag.description || `API reference for ${tag.name}`; + doc['x-tagGroup'] = tag.name; + + const tagSlug = slugifyTag(tag.name); + + try { + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + + console.log(`Generated conceptual tag spec: ${baseFilename}.yaml`); + } catch (err) { + console.error(`Error writing conceptual tag ${tag.name}:`, err); + } + } + }); +} + /** * Write OpenAPI specs grouped by path to separate files * Generates both YAML and JSON versions @@ -364,7 +599,39 @@ function writePathOpenapis( // Deep copy openapi const doc: OpenAPIDocument = JSON.parse(JSON.stringify(openapi)); doc.paths = pathGroups[pg]; - doc.info.title = `${pg}\n${doc.info.title}`; + + // Collect tags used by operations in this path group + const usedTags = new Set(); + Object.values(doc.paths).forEach((pathItem: PathItem) => { + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + httpMethods.forEach((method) => { + const operation = pathItem[method] as Operation | undefined; + if (operation?.tags) { + operation.tags.forEach((tag) => usedTags.add(tag)); + } + }); + }); + + // Filter tags to only include those used by operations in this path group + // Exclude x-traitTag tags (supplementary documentation tags) + if (doc.tags) { + doc.tags = doc.tags.filter( + (tag) => usedTags.has(tag.name) && !tag['x-traitTag'] + ); + } + + // Simplify info for path-specific docs + doc.info.title = pg; + doc.info.description = `API reference for ${pg}`; 
doc['x-pathGroup'] = pg; try { @@ -403,26 +670,118 @@ function createArticleDataForPathGroup(openapi: OpenAPIDocument): Article { }; /** - * Convert path to snake case for article path + * Convert OpenAPI path to Hugo-friendly article path + * Legacy endpoints (without /api/ prefix) go under api/ directly + * Versioned endpoints (with /api/vN/) keep their structure + * + * @param p - Path to convert (e.g., '/health', '/api/v3/query_sql') + * @returns Path suitable for Hugo content directory (e.g., 'api/health', 'api/v3/query_sql') + */ + const toHugoPath = (p: string): string => { + if (!p) { + return ''; + } + // If path doesn't start with /api/, it's a legacy endpoint + // Place it directly under api/ to avoid collision with /api/v1/* paths + if (!p.startsWith('/api/')) { + // /health -> api/health + // /write -> api/write + return `api${p}`; + } + // /api/v1/health -> api/v1/health + // /api/v2/write -> api/v2/write + // /api/v3/query_sql -> api/v3/query_sql + return p.replace(/^\//, ''); + }; + + /** + * Convert path to tag-friendly format (dashes instead of slashes) * * @param p - Path to convert - * @returns Snake-cased path + * @returns Tag-friendly path */ - const snakifyPath = (p: string): string => { + const toTagPath = (p: string): string => { if (!p) { return ''; } return p.replace(/^\//, '').replaceAll('/', '-'); }; - article.path = snakifyPath(openapi['x-pathGroup'] || ''); + const pathGroup = openapi['x-pathGroup'] || ''; + article.path = toHugoPath(pathGroup); + // Store original path for menu display (shows actual endpoint path) + article.fields.menuName = pathGroup; article.fields.title = openapi.info?.title; article.fields.description = openapi.description; const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] .filter(Boolean) - .map((t) => snakifyPath(t)); + .map((t) => toTagPath(t)); + + // Extract x-relatedLinks and OpenAPI tags from path items or operations + const 
relatedLinks: string[] = []; + const apiTags: string[] = []; + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + + Object.values(openapi.paths).forEach((pathItem: PathItem) => { + // Check path-level x-relatedLinks + if ( + pathItem['x-relatedLinks'] && + Array.isArray(pathItem['x-relatedLinks']) + ) { + relatedLinks.push( + ...(pathItem['x-relatedLinks'] as string[]).filter( + (link) => !relatedLinks.includes(link) + ) + ); + } + + // Check operation-level x-relatedLinks and tags + httpMethods.forEach((method) => { + const operation = pathItem[method] as Operation | undefined; + if (operation) { + // Extract x-relatedLinks + if ( + operation['x-relatedLinks'] && + Array.isArray(operation['x-relatedLinks']) + ) { + relatedLinks.push( + ...(operation['x-relatedLinks'] as string[]).filter( + (link) => !relatedLinks.includes(link) + ) + ); + } + // Extract OpenAPI tags from operation + if (operation.tags && Array.isArray(operation.tags)) { + operation.tags.forEach((tag) => { + if (!apiTags.includes(tag)) { + apiTags.push(tag); + } + }); + } + } + }); + }); + + // Only add related if there are links + if (relatedLinks.length > 0) { + article.fields.related = relatedLinks; + } + + // Add OpenAPI tags from operations (for Hugo frontmatter) + if (apiTags.length > 0) { + article.fields.apiTags = apiTags; + } return article; } @@ -493,6 +852,185 @@ function writeOpenapiArticleData( } } +/** + * Create article data for a tag-based grouping + * + * @param openapi - OpenAPI document with x-tagGroup + * @param operations - Operations for this tag + * @param tagMeta - Tag metadata from OpenAPI spec + * @returns Article metadata object + */ +function createArticleDataForTag( + openapi: OpenAPIDocument, + operations: OperationMeta[], + tagMeta?: Tag +): Article { + const tagName = openapi['x-tagGroup'] as string || ''; + const tagSlug = slugifyTag(tagName); + const isConceptual = tagMeta?.['x-traitTag'] === true; + + 
const article: Article = { + path: `api/${tagSlug}`, + fields: { + name: tagName, + describes: Object.keys(openapi.paths), + title: tagName, + description: tagMeta?.description || openapi.info?.description || `API reference for ${tagName}`, + tag: tagName, + isConceptual, + menuGroup: getMenuGroupForTag(tagName), + operations: operations.map((op) => ({ + operationId: op.operationId, + method: op.method, + path: op.path, + summary: op.summary, + tags: op.tags, + })), + }, + }; + + // Add tag description for conceptual pages + if (tagMeta?.description) { + article.fields.tagDescription = tagMeta.description; + } + + return article; +} + +/** + * Write tag-based OpenAPI article metadata to Hugo data files + * Generates articles.yml and articles.json + * + * @param sourcePath - Path to directory containing tag-based OpenAPI fragment files + * @param targetPath - Output path for article data + * @param openapi - Original OpenAPI document (for tag metadata) + * @param opts - Options including file pattern filter + */ +function writeOpenapiTagArticleData( + sourcePath: string, + targetPath: string, + openapi: OpenAPIDocument, + opts: WriteOpenapiArticleDataOptions +): void { + const isFile = (filePath: string): boolean => { + return fs.lstatSync(filePath).isFile(); + }; + + const matchesPattern = (filePath: string): boolean => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + + // Create tag metadata lookup + const tagMetaMap = new Map(); + (openapi.tags || []).forEach((tag) => { + tagMetaMap.set(tag.name, tag); + }); + + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter( + (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') + ) + .map((filePath) => { + const tagOpenapi = readFile(filePath); + const tagName = tagOpenapi['x-tagGroup'] as string || tagOpenapi.info?.title || ''; + const tagMeta = tagMetaMap.get(tagName); + + // Extract operations from the tag-filtered spec + const operations: OperationMeta[] = []; + Object.entries(tagOpenapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method] as Operation | undefined; + if (operation) { + operations.push({ + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }); + } + }); + }); + + const article = createArticleDataForTag(tagOpenapi, operations, tagMeta); + article.fields.source = filePath; + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); + } + + const articleCollection: ArticleCollection = { articles }; + + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + + console.log(`Generated ${articles.length} tag-based articles in ${targetPath}`); + } catch (e) { + console.error('Error writing tag article data:', e); + } +} + +/** + * Options for generating Hugo data by tag + */ 
+export interface GenerateHugoDataByTagOptions extends GenerateHugoDataOptions { + /** Whether to also generate path-based files (for backwards compatibility) */ + includePaths?: boolean; +} + +/** + * Generate Hugo data files from an OpenAPI specification grouped by tag + * + * This function: + * 1. Reads the OpenAPI spec file + * 2. Groups operations by their OpenAPI tags + * 3. Writes each tag group to separate YAML and JSON files + * 4. Generates tag-based article metadata for Hugo + * + * @param options - Generation options + */ +export function generateHugoDataByTag(options: GenerateHugoDataByTagOptions): void { + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = readFile(options.specFile, 'utf8'); + + // Optionally generate path-based files for backwards compatibility + if (options.includePaths) { + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + } + + // Generate tag-based files + const tagOutPath = options.includePaths + ? 
path.join(options.dataOutPath, 'tags') + : options.dataOutPath; + + console.log(`\nGenerating OpenAPI tag files in ${tagOutPath}....`); + writeTagOpenapis(sourceFile, filenamePrefix, tagOutPath); + + console.log( + `\nGenerating OpenAPI tag article data in ${options.articleOutPath}...` + ); + writeOpenapiTagArticleData(tagOutPath, options.articleOutPath, sourceFile, { + filePattern: filenamePrefix, + }); + + console.log('\nTag-based generation complete!\n'); +} + /** * Generate Hugo data files from an OpenAPI specification * @@ -525,4 +1063,5 @@ export function generateHugoData(options: GenerateHugoDataOptions): void { // CommonJS export for backward compatibility module.exports = { generateHugoData, + generateHugoDataByTag, }; From 14e5312cbc5d09c560892c115d0bb3d582338e66 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:05:00 -0600 Subject: [PATCH 08/51] feat(api): Update Hugo layouts for tag-based API documentation - Rewrite single.html for operation pages with RapiDoc integration - Simplify rapidoc.html partial for tag-based rendering - Add sidebar-nav include to sidebar.html for API navigation - Add tab-panels.html and tabs.html for content organization --- layouts/api/single.html | 115 +++++++++++-- layouts/partials/api/rapidoc.html | 247 ++++++++++++++++++--------- layouts/partials/api/renderer.html | 20 +-- layouts/partials/api/scalar.html | 159 +---------------- layouts/partials/api/tab-panels.html | 31 ++++ layouts/partials/api/tabs.html | 10 ++ layouts/partials/sidebar.html | 5 + 7 files changed, 332 insertions(+), 255 deletions(-) create mode 100644 layouts/partials/api/tab-panels.html create mode 100644 layouts/partials/api/tabs.html diff --git a/layouts/api/single.html b/layouts/api/single.html index 3bf97deb1a..b53a1a2496 100644 --- a/layouts/api/single.html +++ b/layouts/api/single.html @@ -1,32 +1,119 @@ {{/* API Documentation Single Page Layout - Uses the renderer abstraction to display API documentation. 
- The renderer (Scalar or RapiDoc) is selected via site.Params.apiRenderer. + Used for: + - Individual operation pages (nested under tag pages) + - Conceptual pages (isConceptual: true) like Authentication, Quick start + + For operation pages: + - Renders RapiDoc with "Mix your own HTML" slots for custom content + - Shows method badge, path, and operation details + + For conceptual pages: + - Shows Hugo content or tagDescription markdown Required frontmatter: - - staticFilePath: Path to the OpenAPI specification file + - title: Page title + - staticFilePath: Path to OpenAPI spec (for operation pages) + - operationId: Operation ID to render (for operation pages) + - isConceptual: true (for conceptual pages) */}} {{ partial "header.html" . }} {{ partial "topnav.html" . }}
+ {{/* Left: Existing Hugo sidebar (includes API nav via sidebar.html) */}} {{ partial "sidebar.html" . }} -
-
-
-

{{ .Title }}

- {{ with .Description }} -

{{ . }}

+ + {{/* Center + Right: Content and TOC */}} +
+
+
+
+
+
+ {{/* For operation pages, show method badge with title */}} + {{ with .Params.method }} +
+ {{ upper . }} +

{{ $.Title }}

+
+ {{ with $.Params.path }} + {{ . }} + {{ end }} + {{ else }} +

{{ .Title }}

+ {{ end }} + + {{/* Summary/Description */}} + {{ with .Params.summary }} +

{{ . | markdownify }}

+ {{ else }} + {{ with .Description }} +

{{ . | markdownify }}

+ {{ end }} + {{ end }} +
+ + {{/* Download OpenAPI spec button */}} + {{ with .Params.staticFilePath }} + + {{ end }} +
+
+ + {{ $isConceptual := .Params.isConceptual | default false }} + + {{ if $isConceptual }} + {{/* Conceptual Page - Show content directly */}} +
+ {{ with .Content }} + {{ . }} + {{ else }} + {{ with .Params.tagDescription }} + {{ . | markdownify }} + {{ end }} + {{ end }} +
+ {{ else }} + {{/* Operation Page - RapiDoc with custom slots */}} + + {{/* Hugo page content shown as overview */}} + {{ with .Content }} +
+ {{ . }} +
+ {{ end }} + + {{/* RapiDoc renderer with slot-based customization */}} + {{ with .Params.staticFilePath }} +
+ {{ partial "api/rapidoc.html" $ }} +
{{ end }} -
- {{/* Render API documentation using the configured renderer */}} - {{ partial "api/renderer.html" . }} + {{ end }} + + {{/* Related documentation links */}} + {{ partial "article/related.html" . }} + +
+
- - + {{/* Right: Page TOC - "ON THIS PAGE" */}} +
diff --git a/layouts/partials/api/rapidoc.html b/layouts/partials/api/rapidoc.html index 01b491deb8..ad7cc2ebad 100644 --- a/layouts/partials/api/rapidoc.html +++ b/layouts/partials/api/rapidoc.html @@ -1,15 +1,28 @@ {{/* - RapiDoc API Documentation Renderer (Legacy) + RapiDoc API Documentation Renderer - Legacy API documentation renderer using RapiDoc. - Maintained for backward compatibility. + Primary API documentation renderer using RapiDoc with "Mix your own HTML" slots. + See: https://rapidocweb.com/examples.html Required page params: - staticFilePath: Path to the OpenAPI specification file + + Optional page params: + - operationId: Specific operation to display (renders only that operation) + - tag: Tag to filter operations by + + RapiDoc slots available for custom content: + - slot="header" - Custom header + - slot="footer" - Custom footer + - slot="overview" - Custom overview content + - slot="auth" - Custom authentication section + - slot="nav-logo" - Custom navigation logo */}} {{ $specPath := .Params.staticFilePath }} {{ $specPathJSON := replace $specPath ".yaml" ".json" | replace ".yml" ".json" }} +{{ $operationId := .Params.operationId | default "" }} +{{ $tag := .Params.tag | default "" }} {{/* Machine-readable links for AI agent discovery */}} {{ if $specPath }} @@ -18,51 +31,63 @@ {{ end }}
- {{/* Download link for the spec */}} - {{ if $specPath }} - - {{ end }} - - {{/* RapiDoc component */}} + {{/* RapiDoc component with slot-based customization */}} + {{ if $operationId }}goto-path="op/{{ $operationId }}"{{ end }} + {{ if $tag }}match-paths="tag/{{ $tag }}"{{ end }} + > + {{/* Custom overview slot - Hugo page content */}} + {{ with .Content }} +
+ {{ . }} +
+ {{ end }} + + {{/* Custom examples from frontmatter */}} + {{ with .Params.examples }} +
+

Examples

+ {{ range . }} +
+

{{ .title }}

+ {{ with .description }}

{{ . | markdownify }}

{{ end }} +
{{ .code }}
+
+ {{ end }} +
+ {{ end }} +
{{/* Load RapiDoc from CDN */}} @@ -72,29 +97,66 @@ (function() { 'use strict'; + // Detect current theme by checking which theme stylesheet is enabled + function isDarkTheme() { + // Check for enabled dark-theme stylesheet + const darkStylesheet = document.querySelector('link[rel*="stylesheet"][title="dark-theme"]:not([disabled])'); + if (darkStylesheet && !darkStylesheet.disabled) { + return true; + } + + // Fallback: check for data-theme attribute (some pages may use this) + if (document.documentElement.dataset.theme === 'dark') { + return true; + } + + // Fallback: check localStorage preference + try { + const stored = localStorage.getItem('defined_style_preference'); + if (stored) { + const prefs = JSON.parse(stored); + return prefs.theme === 'dark'; + } + } catch (e) { + // Ignore localStorage errors + } + + return false; + } + // Update RapiDoc theme based on document theme function updateRapiDocTheme() { const rapiDoc = document.getElementById('api-doc'); if (!rapiDoc) return; - const isDark = document.documentElement.dataset.theme === 'dark'; + const isDark = isDarkTheme(); if (isDark) { + // Match Hugo dark theme: $grey10: #14141F rapiDoc.setAttribute('theme', 'dark'); - rapiDoc.setAttribute('bg-color', '#1a1a1a'); - rapiDoc.setAttribute('text-color', '#e0e0e0'); - rapiDoc.setAttribute('nav-bg-color', '#2a2a2a'); - rapiDoc.setAttribute('nav-text-color', '#e0e0e0'); - rapiDoc.setAttribute('nav-hover-bg-color', '#3a3a3a'); + rapiDoc.setAttribute('bg-color', '#14141F'); + rapiDoc.setAttribute('text-color', '#D4D7DD'); + rapiDoc.setAttribute('header-color', '#D4D7DD'); + rapiDoc.setAttribute('primary-color', '#a0a0ff'); + rapiDoc.setAttribute('nav-bg-color', '#1a1a2a'); + rapiDoc.setAttribute('nav-text-color', '#D4D7DD'); + rapiDoc.setAttribute('nav-hover-bg-color', '#252535'); rapiDoc.setAttribute('nav-hover-text-color', '#ffffff'); + rapiDoc.setAttribute('nav-accent-color', '#a0a0ff'); + rapiDoc.setAttribute('code-theme', 'monokai'); } else { + // Match 
Hugo light theme: $g20-white: #FFFFFF rapiDoc.setAttribute('theme', 'light'); rapiDoc.setAttribute('bg-color', '#ffffff'); - rapiDoc.setAttribute('text-color', '#333333'); - rapiDoc.setAttribute('nav-bg-color', '#f5f5f5'); - rapiDoc.setAttribute('nav-text-color', '#333333'); - rapiDoc.setAttribute('nav-hover-bg-color', '#ffebea'); - rapiDoc.setAttribute('nav-hover-text-color', '#9b0700'); + rapiDoc.setAttribute('text-color', '#2b2b2b'); + rapiDoc.setAttribute('header-color', '#020a47'); + rapiDoc.setAttribute('primary-color', '#020a47'); + rapiDoc.setAttribute('nav-bg-color', '#f7f8fa'); + rapiDoc.setAttribute('nav-text-color', '#2b2b2b'); + rapiDoc.setAttribute('nav-hover-bg-color', '#e8e8f0'); + rapiDoc.setAttribute('nav-hover-text-color', '#020a47'); + rapiDoc.setAttribute('nav-accent-color', '#020a47'); + rapiDoc.setAttribute('code-theme', 'prism'); } } @@ -105,15 +167,31 @@ updateRapiDocTheme(); } - // Watch for theme changes + // Watch for stylesheet changes (theme toggles enable/disable stylesheets) const observer = new MutationObserver(function(mutations) { mutations.forEach(function(mutation) { + // Check if a stylesheet's disabled attribute changed + if (mutation.type === 'attributes' && + mutation.target.tagName === 'LINK' && + mutation.target.title && + mutation.target.title.includes('theme')) { + updateRapiDocTheme(); + } + // Also watch for data-theme changes as a fallback if (mutation.attributeName === 'data-theme') { updateRapiDocTheme(); } }); }); + // Observe the head element for stylesheet changes + observer.observe(document.head, { + attributes: true, + attributeFilter: ['disabled'], + subtree: true + }); + + // Also observe the document element for data-theme changes observer.observe(document.documentElement, { attributes: true, attributeFilter: ['data-theme'] @@ -126,54 +204,63 @@ width: 100%; } -.api-spec-actions { - display: flex; - gap: 1rem; - margin-bottom: 1rem; - padding: 1rem 0; - border-bottom: 1px solid var(--color-border, #e0e0e0); 
+rapi-doc { + width: 100%; + min-height: 600px; + display: block; + /* Override RapiDoc's internal font sizes to match Hugo docs */ + --font-size-small: 15px; + --font-size-mono: 15px; + --font-size-regular: 17px; + /* Match Hugo theme backgrounds - light mode default */ + --bg: #ffffff; + --bg2: #f7f8fa; + --bg3: #eef0f3; } -.api-spec-download { - display: inline-flex; - align-items: center; - gap: 0.5rem; - padding: 0.5rem 1rem; - background-color: var(--color-bg-secondary, #f5f5f5); - color: var(--color-text, #333); - text-decoration: none; +/* Dark mode overrides - match Hugo $grey10: #14141F */ +[data-theme="dark"] rapi-doc, +html:has(link[title="dark-theme"]:not([disabled])) rapi-doc { + --bg: #14141F; + --bg2: #1a1a2a; + --bg3: #252535; + --fg: #D4D7DD; + --fg2: #c8ccd2; + --fg3: #b0b4ba; +} + +/* Custom examples section styling */ +.api-custom-examples { + padding: 1.5rem; + background: var(--bg2, #f7f8fa); border-radius: 4px; - font-size: 0.875rem; - font-weight: 500; - transition: background-color 0.2s, color 0.2s; - border: 1px solid var(--color-border, #e0e0e0); + margin-top: 1rem; } -.api-spec-download:hover { - background-color: #F63C41; - color: white; - border-color: #F63C41; +.api-custom-examples h3 { + margin-top: 0; + margin-bottom: 1rem; + font-size: 1.1rem; } -rapi-doc { - width: 100%; - min-height: 600px; - display: block; +.api-example { + margin-bottom: 1.5rem; } -/* Dark theme adjustments */ -[data-theme="dark"] .api-spec-download { - background-color: #2a2a2a; - color: #e0e0e0; - border-color: #444; +.api-example:last-child { + margin-bottom: 0; } -[data-theme="dark"] .api-spec-download:hover { - background-color: #F63C41; - color: white; +.api-example h4 { + margin: 0 0 0.5rem 0; + font-size: 1rem; } -[data-theme="dark"] .api-spec-actions { - border-color: #444; +.api-example pre { + margin: 0; + padding: 1rem; + background: var(--bg3, #eef0f3); + border-radius: 4px; + overflow-x: auto; } diff --git a/layouts/partials/api/renderer.html 
b/layouts/partials/api/renderer.html index c6b795f37f..f268bfbff3 100644 --- a/layouts/partials/api/renderer.html +++ b/layouts/partials/api/renderer.html @@ -1,25 +1,23 @@ {{/* API Renderer Abstraction - Selects and loads the appropriate API documentation renderer based on site - configuration. Supports: - - scalar (default): Modern, accessible API documentation - - rapidoc: Legacy renderer for backward compatibility + Selects and loads the appropriate API documentation renderer. + RapiDoc is the primary renderer. Required page params: - staticFilePath: Path to the OpenAPI specification file Site params: - - apiRenderer: "scalar" or "rapidoc" (defaults to "scalar") + - apiRenderer: "rapidoc" (default) or "scalar" */}} -{{ $renderer := site.Params.apiRenderer | default "scalar" }} +{{ $renderer := site.Params.apiRenderer | default "rapidoc" }} -{{ if eq $renderer "scalar" }} - {{ partial "api/scalar.html" . }} -{{ else if eq $renderer "rapidoc" }} +{{ if eq $renderer "rapidoc" }} {{ partial "api/rapidoc.html" . }} -{{ else }} - {{/* Fallback to scalar if unknown renderer specified */}} +{{ else if eq $renderer "scalar" }} {{ partial "api/scalar.html" . }} +{{ else }} + {{/* Fallback to rapidoc if unknown renderer specified */}} + {{ partial "api/rapidoc.html" . }} {{ end }} diff --git a/layouts/partials/api/scalar.html b/layouts/partials/api/scalar.html index 885e286ed8..b9b75bcb2e 100644 --- a/layouts/partials/api/scalar.html +++ b/layouts/partials/api/scalar.html @@ -23,164 +23,23 @@ {{ end }}
- {{/* Download link for the spec */}} - {{ if $specPath }} - - {{ end }} - - {{/* Scalar API Reference container */}} -
+ {{/* Download button is now in single.html header row above tabs */}} + + {{/* Scalar API Reference container (component-driven) */}} +
-{{/* Load Scalar from CDN */}} - - - - diff --git a/layouts/partials/api/tab-panels.html b/layouts/partials/api/tab-panels.html new file mode 100644 index 0000000000..68d28973c0 --- /dev/null +++ b/layouts/partials/api/tab-panels.html @@ -0,0 +1,31 @@ +{{/* + API Reference Tab Panels (DEPRECATED) + + This partial is kept for backward compatibility. + The new architecture renders content directly in layouts: + - layouts/api/list.html: Tag pages with operations list + - layouts/api/single.html: Individual operation pages with RapiDoc + + For conceptual pages (isConceptual: true), renders tag description content. + For operational pages, renders the API documentation via RapiDoc. +*/}} + +{{ $isConceptual := .Params.isConceptual | default false }} + +{{ if $isConceptual }} +{{/* Conceptual Page - Display tag description content only */}} +
+ {{ with .Content }} + {{ . }} + {{ else }} + {{ with .Params.tagDescription }} + {{ . | markdownify }} + {{ end }} + {{ end }} +
+{{ else }} +{{/* Operations Page - RapiDoc renderer */}} +
+ {{ partial "api/rapidoc.html" . }} +
+{{ end }} diff --git a/layouts/partials/api/tabs.html b/layouts/partials/api/tabs.html new file mode 100644 index 0000000000..a33099cc0f --- /dev/null +++ b/layouts/partials/api/tabs.html @@ -0,0 +1,10 @@ +{{/* + API Reference Page Tabs (DEPRECATED) + + This partial is kept for backward compatibility but renders nothing. + The new architecture uses: + - Tag pages (list.html): Display operations list directly + - Operation pages (single.html): Display RapiDoc for single operation +*/}} + +{{/* No tabs rendered - using simplified layout */}} diff --git a/layouts/partials/sidebar.html b/layouts/partials/sidebar.html index 85caa0d2f2..1352bccf12 100644 --- a/layouts/partials/sidebar.html +++ b/layouts/partials/sidebar.html @@ -110,4 +110,9 @@

Additional resources

{{ end }} + + {{/* API Reference Navigation - shown only for API pages */}} + {{ if eq .Type "api" }} + {{ partial "api/sidebar-nav.html" . }} + {{ end }} From 40cf280c5ee6bc433436ee9ef3bf2d2f9d3ff797 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:06:43 -0600 Subject: [PATCH 09/51] feat(api): Add TypeScript components for API UI interactions - Add api-nav.ts for sidebar navigation collapse/expand - Add api-scalar.ts for Scalar API renderer integration - Add api-tabs.ts for tab switching functionality - Add api-toc.ts for table of contents generation - Register components in main.js --- assets/js/components/api-nav.ts | 76 +++++ assets/js/components/api-scalar.ts | 326 ++++++++++++++++++++++ assets/js/components/api-tabs.ts | 144 ++++++++++ assets/js/components/api-toc.ts | 434 +++++++++++++++++++++++++++++ assets/js/main.js | 8 + 5 files changed, 988 insertions(+) create mode 100644 assets/js/components/api-nav.ts create mode 100644 assets/js/components/api-scalar.ts create mode 100644 assets/js/components/api-tabs.ts create mode 100644 assets/js/components/api-toc.ts diff --git a/assets/js/components/api-nav.ts b/assets/js/components/api-nav.ts new file mode 100644 index 0000000000..876ca80926 --- /dev/null +++ b/assets/js/components/api-nav.ts @@ -0,0 +1,76 @@ +/** + * API Navigation Component + * + * Handles collapsible navigation groups in the API sidebar. 
+ * Features: + * - Toggle expand/collapse on group headers + * - ARIA accessibility support + * - Keyboard navigation + * + * Usage: + * + */ + +interface ComponentOptions { + component: HTMLElement; +} + +/** + * Initialize API Navigation component + */ +export default function ApiNav({ component }: ComponentOptions): void { + const headers = component.querySelectorAll( + '.api-nav-group-header' + ); + + headers.forEach((header) => { + header.addEventListener('click', () => { + const isOpen = header.classList.toggle('is-open'); + header.setAttribute('aria-expanded', String(isOpen)); + + const items = header.nextElementSibling; + if (items) { + items.classList.toggle('is-open', isOpen); + } + }); + + // Keyboard support - Enter and Space already work for buttons + // but add support for arrow keys to navigate between groups + header.addEventListener('keydown', (event: KeyboardEvent) => { + const allHeaders = Array.from(headers); + const currentIndex = allHeaders.indexOf(header); + + switch (event.key) { + case 'ArrowDown': + event.preventDefault(); + if (currentIndex < allHeaders.length - 1) { + allHeaders[currentIndex + 1].focus(); + } + break; + case 'ArrowUp': + event.preventDefault(); + if (currentIndex > 0) { + allHeaders[currentIndex - 1].focus(); + } + break; + case 'Home': + event.preventDefault(); + allHeaders[0].focus(); + break; + case 'End': + event.preventDefault(); + allHeaders[allHeaders.length - 1].focus(); + break; + } + }); + }); +} diff --git a/assets/js/components/api-scalar.ts b/assets/js/components/api-scalar.ts new file mode 100644 index 0000000000..62161a2140 --- /dev/null +++ b/assets/js/components/api-scalar.ts @@ -0,0 +1,326 @@ +/** + * Scalar API Documentation Component + * + * Initializes the Scalar API reference viewer for OpenAPI documentation. + * Features: + * - Dynamic CDN loading of Scalar library + * - Theme synchronization with site theme + * - InfluxData brand colors + * - Error handling and fallback UI + * + * Usage: + *
+ */ + +import { getPreference } from '../services/local-storage.js'; + +interface ComponentOptions { + component: HTMLElement; +} + +interface ScalarConfig { + url: string; + forceDarkModeState?: 'dark' | 'light'; + layout?: 'classic' | 'modern'; + showSidebar?: boolean; + hideDarkModeToggle?: boolean; + hideSearch?: boolean; + documentDownloadType?: 'none' | 'yaml' | 'json'; + hideModels?: boolean; + hideTestRequestButton?: boolean; + withDefaultFonts?: boolean; + customCss?: string; +} + +type ScalarCreateFn = ( + selector: string | HTMLElement, + config: ScalarConfig +) => void; + +declare global { + interface Window { + Scalar?: { + createApiReference: ScalarCreateFn; + }; + } +} + +const SCALAR_CDN = 'https://cdn.jsdelivr.net/npm/@scalar/api-reference@latest'; + +/** + * Load script dynamically + */ +function loadScript(src: string, timeout = 8000): Promise { + return new Promise((resolve, reject) => { + // Check if script already exists + const existing = Array.from(document.scripts).find( + (s) => s.src && s.src.includes(src) + ); + if (existing && window.Scalar?.createApiReference) { + return resolve(); + } + + const script = document.createElement('script'); + script.src = src; + script.defer = true; + script.onload = () => resolve(); + script.onerror = () => reject(new Error(`Failed to load script: ${src}`)); + + document.head.appendChild(script); + + // Fallback timeout + setTimeout(() => { + if (window.Scalar?.createApiReference) { + resolve(); + } else { + reject(new Error(`Timeout loading script: ${src}`)); + } + }, timeout); + }); +} + +/** + * Get current theme from localStorage (source of truth for Hugo theme system) + */ +function getTheme(): 'dark' | 'light' { + const theme = getPreference('theme'); + return theme === 'dark' ? 
'dark' : 'light'; +} + +/** + * Poll for Scalar availability + */ +function waitForScalar(maxAttempts = 50, interval = 100): Promise { + return new Promise((resolve, reject) => { + let attempts = 0; + + const checkInterval = setInterval(() => { + attempts++; + + if (window.Scalar?.createApiReference) { + clearInterval(checkInterval); + resolve(); + } else if (attempts >= maxAttempts) { + clearInterval(checkInterval); + reject( + new Error(`Scalar not available after ${maxAttempts * interval}ms`) + ); + } + }, interval); + }); +} + +/** + * Initialize Scalar API reference + */ +async function initScalar( + container: HTMLElement, + specUrl: string +): Promise { + if (!window.Scalar?.createApiReference) { + throw new Error('Scalar is not available'); + } + + // Clean up previous Scalar instance (important for theme switching) + // Remove any Scalar-injected content and classes + container.innerHTML = ''; + // Remove Scalar's dark-mode class from body if it exists + document.body.classList.remove('dark-mode'); + + const isDark = getTheme() === 'dark'; + + window.Scalar.createApiReference(container, { + url: specUrl, + forceDarkModeState: getTheme(), + layout: 'classic', + showSidebar: false, + hideDarkModeToggle: true, + hideSearch: true, + documentDownloadType: 'none', + hideModels: false, + hideTestRequestButton: false, + withDefaultFonts: false, + customCss: ` + :root { + /* Typography - match Hugo docs site */ + --scalar-font: 'Proxima Nova', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; + --scalar-font-code: 'IBM Plex Mono', Monaco, Consolas, monospace; + --scalar-font-size-base: 16px; + --scalar-line-height: 1.65; + + /* InfluxData brand colors */ + --scalar-color-1: #F63C41; + --scalar-color-2: #d32f34; + --scalar-color-accent: #F63C41; + + /* Border radius */ + --scalar-radius: 4px; + --scalar-radius-lg: 8px; + + /* Background and text colors - theme-aware */ + --scalar-background-1: ${isDark ? 
'#1a1a2e' : '#ffffff'}; + --scalar-background-2: ${isDark ? '#232338' : '#f7f8fa'}; + --scalar-background-3: ${isDark ? '#2d2d44' : '#f0f2f5'}; + --scalar-text-1: ${isDark ? '#e0e0e0' : '#2b2b2b'}; + --scalar-text-2: ${isDark ? '#a0a0a0' : '#545454'}; + --scalar-text-3: ${isDark ? '#888888' : '#757575'}; + --scalar-border-color: ${isDark ? '#3a3a50' : '#e0e0e0'}; + + /* Heading colors */ + --scalar-heading-color: ${isDark ? '#ffffff' : '#2b2b2b'}; + } + + /* Match Hugo heading styles */ + h1, h2, h3, h4, h5, h6 { + font-family: var(--scalar-font); + font-weight: 600; + color: var(--scalar-heading-color); + line-height: 1.25; + } + + h1 { font-size: 2rem; } + h2 { font-size: 1.5rem; margin-top: 2rem; } + h3 { font-size: 1.25rem; margin-top: 1.5rem; } + h4 { font-size: 1rem; margin-top: 1rem; } + + /* Body text size */ + p, li, td, th { + font-size: 1rem; + line-height: var(--scalar-line-height); + } + + /* Code block styling */ + pre, code { + font-family: var(--scalar-font-code); + font-size: 0.875rem; + } + + /* Hide section-content div */ + div.section-content { + display: none !important; + } + `, + }); + + console.log( + '[API Docs] Scalar initialized with spec:', + specUrl, + 'theme:', + getTheme() + ); +} + +/** + * Show error message in container + */ +function showError(container: HTMLElement, message: string): void { + container.innerHTML = `

${message}

`; +} + +/** + * Watch for Hugo theme changes via stylesheet manipulation + * Hugo theme.js enables/disables link[title*="theme"] elements + */ +function watchThemeChanges(container: HTMLElement, specUrl: string): void { + // Watch for stylesheet changes in the document + const observer = new MutationObserver(() => { + const currentTheme = getTheme(); + console.log('[API Docs] Theme changed to:', currentTheme); + // Re-initialize Scalar with new theme + initScalar(container, specUrl).catch((error) => { + console.error( + '[API Docs] Failed to re-initialize Scalar on theme change:', + error + ); + }); + }); + + // Watch for changes to stylesheet link elements + const head = document.querySelector('head'); + if (head) { + observer.observe(head, { + attributes: true, + attributeFilter: ['disabled'], + subtree: true, + }); + } + + // Also watch for localStorage changes from other tabs + window.addEventListener('storage', (event) => { + if (event.key === 'influxdata_docs_preferences' && event.newValue) { + try { + const prefs = JSON.parse(event.newValue); + if (prefs.theme) { + const currentTheme = getTheme(); + console.log( + '[API Docs] Theme changed via storage event to:', + currentTheme + ); + initScalar(container, specUrl).catch((error) => { + console.error( + '[API Docs] Failed to re-initialize Scalar on storage change:', + error + ); + }); + } + } catch (error) { + console.error( + '[API Docs] Failed to parse localStorage preferences:', + error + ); + } + } + }); +} + +/** + * Initialize API Scalar component + */ +export default async function ApiScalar({ + component, +}: ComponentOptions): Promise { + try { + // Get spec path from data attribute + const specPath = component.dataset.specPath; + const cdn = component.dataset.cdn || SCALAR_CDN; + + if (!specPath) { + console.error('[API Docs] No OpenAPI specification path provided'); + showError( + component, + 'Error: No API specification configured for this page.' 
+ ); + return; + } + + // Build full URL for spec (Scalar needs absolute URL) + const specUrl = window.location.origin + specPath; + + // Load Scalar from CDN if not already loaded + if (!window.Scalar?.createApiReference) { + try { + await loadScript(cdn); + } catch (err) { + console.error('[API Docs] Failed to load Scalar from CDN', err); + } + } + + // Wait for Scalar to be ready + try { + await waitForScalar(); + } catch (err) { + console.error('[API Docs] Scalar failed to initialize', err); + showError(component, 'Error: API viewer failed to load.'); + return; + } + + // Initialize Scalar + await initScalar(component, specUrl); + + // Watch for theme changes and re-initialize Scalar when theme changes + watchThemeChanges(component, specUrl); + } catch (err) { + console.error('[API Docs] ApiScalar component error', err); + showError(component, 'Error: API viewer failed to initialize.'); + } +} diff --git a/assets/js/components/api-tabs.ts b/assets/js/components/api-tabs.ts new file mode 100644 index 0000000000..25b41ec16f --- /dev/null +++ b/assets/js/components/api-tabs.ts @@ -0,0 +1,144 @@ +/** + * API Tabs Component + * + * Handles tab switching for API reference documentation. + * Uses data-tab and data-tab-panel attributes for explicit panel targeting, + * unlike the generic tabs which use positional indexing. + * + * Features: + * - Explicit panel targeting via data-tab-panel + * - Deep linking via URL hash + * - Browser back/forward navigation support + * - Custom event dispatch for TOC updates + * + * Usage: + *
+ * + *
+ *
+ *
...
+ *
...
+ *
+ */ + +interface ComponentOptions { + component: HTMLElement; +} + +/** + * Find the panels container (sibling element after tabs) + */ +function findPanelsContainer(tabsWrapper: HTMLElement): HTMLElement | null { + let sibling = tabsWrapper.nextElementSibling; + while (sibling) { + if (sibling.classList.contains('api-tab-panels')) { + return sibling as HTMLElement; + } + sibling = sibling.nextElementSibling; + } + return null; +} + +/** + * Switch to a specific tab + */ +function switchTab( + tabsWrapper: HTMLElement, + panelsContainer: HTMLElement, + tabId: string, + updateHash = true +): void { + // Update active tab + const tabs = tabsWrapper.querySelectorAll('[data-tab]'); + tabs.forEach((tab) => { + if (tab.dataset.tab === tabId) { + tab.classList.add('is-active'); + } else { + tab.classList.remove('is-active'); + } + }); + + // Update visible panel + const panels = + panelsContainer.querySelectorAll('[data-tab-panel]'); + panels.forEach((panel) => { + if (panel.dataset.tabPanel === tabId) { + panel.style.display = 'block'; + } else { + panel.style.display = 'none'; + } + }); + + // Update URL hash without scrolling + if (updateHash) { + history.replaceState(null, '', '#' + tabId); + } + + // Dispatch custom event for TOC update + document.dispatchEvent( + new CustomEvent('api-tab-change', { detail: { tab: tabId } }) + ); +} + +/** + * Get tab ID from URL hash + */ +function getTabFromHash(): string | null { + const hash = window.location.hash.substring(1); + return hash || null; +} + +/** + * Initialize API Tabs component + */ +export default function ApiTabs({ component }: ComponentOptions): void { + const panelsContainer = findPanelsContainer(component); + + if (!panelsContainer) { + console.warn('[API Tabs] No .api-tab-panels container found'); + return; + } + + const tabs = component.querySelectorAll('[data-tab]'); + + if (tabs.length === 0) { + console.warn('[API Tabs] No tabs found with data-tab attribute'); + return; + } + + // Handle tab clicks + 
tabs.forEach((tab) => { + tab.addEventListener('click', (e) => { + e.preventDefault(); + e.stopPropagation(); // Prevent other tab handlers from firing + + const tabId = tab.dataset.tab; + if (tabId) { + switchTab(component, panelsContainer, tabId); + } + }); + }); + + // Handle deep linking via URL hash on load + const hashTab = getTabFromHash(); + if (hashTab) { + const matchingTab = component.querySelector(`[data-tab="${hashTab}"]`); + if (matchingTab) { + switchTab(component, panelsContainer, hashTab, false); + } + } + + // Handle browser back/forward navigation + window.addEventListener('hashchange', () => { + const newTabId = getTabFromHash(); + if (newTabId) { + const matchingTab = component.querySelector(`[data-tab="${newTabId}"]`); + if (matchingTab) { + switchTab(component, panelsContainer, newTabId, false); + } + } + }); +} diff --git a/assets/js/components/api-toc.ts b/assets/js/components/api-toc.ts new file mode 100644 index 0000000000..06b0d2f589 --- /dev/null +++ b/assets/js/components/api-toc.ts @@ -0,0 +1,434 @@ +/** + * API Table of Contents Component + * + * Generates "ON THIS PAGE" navigation from content headings or operations data. 
+ * Features: + * - Builds TOC from h2/h3 headings in the active tab panel (legacy) + * - Builds TOC from operations data passed via data-operations attribute (tag-based) + * - Highlights current section on scroll (intersection observer) + * - Smooth scroll to anchors + * - Updates when tab changes + * + * Usage: + * + */ + +interface ComponentOptions { + component: HTMLElement; +} + +interface TocEntry { + id: string; + text: string; + level: number; +} + +/** + * Operation metadata from frontmatter (for tag-based pages) + */ +interface OperationMeta { + operationId: string; + method: string; + path: string; + summary: string; + tags: string[]; +} + +/** + * Check if the active panel contains a RapiDoc component + */ +function isRapiDocActive(): boolean { + const activePanel = document.querySelector( + '.tab-content:not([style*="display: none"]), [data-tab-panel]:not([style*="display: none"])' + ); + return activePanel?.querySelector('rapi-doc') !== null; +} + +/** + * Get headings from the currently visible content + */ +function getVisibleHeadings(): TocEntry[] { + // Find the active tab panel or main content area + const activePanel = document.querySelector( + '.tab-content:not([style*="display: none"]), [data-tab-panel]:not([style*="display: none"]), .article--content' + ); + + if (!activePanel) { + return []; + } + + const headings = activePanel.querySelectorAll('h2, h3'); + const entries: TocEntry[] = []; + + headings.forEach((heading) => { + // Skip headings without IDs + if (!heading.id) { + return; + } + + // Skip hidden headings + const rect = heading.getBoundingClientRect(); + if (rect.width === 0 && rect.height === 0) { + return; + } + + entries.push({ + id: heading.id, + text: heading.textContent?.trim() || '', + level: heading.tagName === 'H2' ? 
2 : 3, + }); + }); + + return entries; +} + +/** + * Build TOC HTML from entries + */ +function buildTocHtml(entries: TocEntry[]): string { + if (entries.length === 0) { + // Check if RapiDoc is active - show helpful message + if (isRapiDocActive()) { + return '

Use RapiDoc\'s navigation below to explore this endpoint.

'; + } + return '

No sections on this page.

'; + } + + let html = '
    '; + + entries.forEach((entry) => { + const indent = entry.level === 3 ? ' api-toc-item--nested' : ''; + html += ` +
  • + ${entry.text} +
  • + `; + }); + + html += '
'; + return html; +} + +/** + * Get method badge class for HTTP method + */ +function getMethodClass(method: string): string { + const m = method.toLowerCase(); + switch (m) { + case 'get': + return 'api-method--get'; + case 'post': + return 'api-method--post'; + case 'put': + return 'api-method--put'; + case 'patch': + return 'api-method--patch'; + case 'delete': + return 'api-method--delete'; + default: + return ''; + } +} + +/** + * Build TOC HTML from operations data (for tag-based pages) + */ +function buildOperationsTocHtml(operations: OperationMeta[]): string { + if (operations.length === 0) { + return '

No operations on this page.

'; + } + + let html = '
    '; + + operations.forEach((op) => { + // Generate anchor ID from operationId (Scalar uses operationId for anchors) + const anchorId = op.operationId; + const methodClass = getMethodClass(op.method); + + html += ` +
  • + + ${op.method.toUpperCase()} + ${op.path} + +
  • + `; + }); + + html += '
'; + return html; +} + +/** + * Parse operations from data attribute + */ +function parseOperationsData(component: HTMLElement): OperationMeta[] | null { + const dataAttr = component.getAttribute('data-operations'); + if (!dataAttr) { + return null; + } + + try { + const operations = JSON.parse(dataAttr) as OperationMeta[]; + return Array.isArray(operations) ? operations : null; + } catch (e) { + console.warn('[API TOC] Failed to parse operations data:', e); + return null; + } +} + +/** + * Set up intersection observer for scroll highlighting + */ +function setupScrollHighlighting( + container: HTMLElement, + entries: TocEntry[] +): IntersectionObserver | null { + if (entries.length === 0) { + return null; + } + + const headingIds = entries.map((e) => e.id); + const links = container.querySelectorAll('.api-toc-link'); + + // Create a map of heading ID to link element + const linkMap = new Map(); + links.forEach((link) => { + const href = link.getAttribute('href'); + if (href?.startsWith('#')) { + linkMap.set(href.slice(1), link); + } + }); + + // Track which headings are visible + const visibleHeadings = new Set(); + + const observer = new IntersectionObserver( + (observerEntries) => { + observerEntries.forEach((entry) => { + const id = entry.target.id; + + if (entry.isIntersecting) { + visibleHeadings.add(id); + } else { + visibleHeadings.delete(id); + } + }); + + // Find the first visible heading (in document order) + let activeId: string | null = null; + for (const id of headingIds) { + if (visibleHeadings.has(id)) { + activeId = id; + break; + } + } + + // If no heading is visible, use the last one that was scrolled past + if (!activeId && visibleHeadings.size === 0) { + const scrollY = window.scrollY; + for (let i = headingIds.length - 1; i >= 0; i--) { + const heading = document.getElementById(headingIds[i]); + if (heading && heading.offsetTop < scrollY + 100) { + activeId = headingIds[i]; + break; + } + } + } + + // Update active state on links + 
links.forEach((link) => { + link.classList.remove('is-active'); + }); + + if (activeId) { + const activeLink = linkMap.get(activeId); + activeLink?.classList.add('is-active'); + } + }, + { + rootMargin: '-80px 0px -70% 0px', + threshold: 0, + } + ); + + // Observe all headings + headingIds.forEach((id) => { + const heading = document.getElementById(id); + if (heading) { + observer.observe(heading); + } + }); + + return observer; +} + +/** + * Set up smooth scroll for TOC links + */ +function setupSmoothScroll(container: HTMLElement): void { + container.addEventListener('click', (event) => { + const target = event.target as HTMLElement; + const link = target.closest('.api-toc-link'); + + if (!link) { + return; + } + + const href = link.getAttribute('href'); + if (!href?.startsWith('#')) { + return; + } + + const targetElement = document.getElementById(href.slice(1)); + if (!targetElement) { + return; + } + + event.preventDefault(); + + // Scroll with offset for fixed header + const headerOffset = 80; + const elementPosition = targetElement.getBoundingClientRect().top; + const offsetPosition = elementPosition + window.scrollY - headerOffset; + + window.scrollTo({ + top: offsetPosition, + behavior: 'smooth', + }); + + // Update URL hash without jumping + history.pushState(null, '', href); + }); +} + +/** + * Update TOC visibility based on active tab + * Hide TOC for Operations tab (RapiDoc has built-in navigation) + */ +function updateTocVisibility(container: HTMLElement): void { + const operationsPanel = document.querySelector( + '[data-tab-panel="operations"]' + ); + const isOperationsVisible = + operationsPanel && + !operationsPanel.getAttribute('style')?.includes('display: none'); + + if (isOperationsVisible) { + container.classList.add('is-hidden'); + } else { + container.classList.remove('is-hidden'); + } +} + +/** + * Watch for tab changes to rebuild TOC + */ +function watchTabChanges( + container: HTMLElement, + rebuild: () => void +): MutationObserver { + 
const tabPanels = document.querySelector('.api-tab-panels'); + + if (!tabPanels) { + return new MutationObserver(() => {}); + } + + const observer = new MutationObserver((mutations) => { + // Check if any tab panel visibility changed + const hasVisibilityChange = mutations.some((mutation) => { + return ( + mutation.type === 'attributes' && + (mutation.attributeName === 'style' || + mutation.attributeName === 'class') + ); + }); + + if (hasVisibilityChange) { + // Update visibility based on active tab + updateTocVisibility(container); + // Debounce rebuild + setTimeout(rebuild, 100); + } + }); + + observer.observe(tabPanels, { + attributes: true, + subtree: true, + attributeFilter: ['style', 'class'], + }); + + return observer; +} + +/** + * Initialize API TOC component + */ +export default function ApiToc({ component }: ComponentOptions): void { + const nav = component.querySelector('.api-toc-nav'); + + if (!nav) { + console.warn('[API TOC] No .api-toc-nav element found'); + return; + } + + // Check for operations data (tag-based pages) + const operations = parseOperationsData(component); + let observer: IntersectionObserver | null = null; + + /** + * Rebuild the TOC + */ + function rebuild(): void { + // Clean up previous observer + if (observer) { + observer.disconnect(); + observer = null; + } + + // If operations data is present, build operations-based TOC + if (operations && operations.length > 0) { + if (nav) { + nav.innerHTML = buildOperationsTocHtml(operations); + } + // Don't hide TOC for tag-based pages - always show operations + component.classList.remove('is-hidden'); + return; + } + + // Otherwise, fall back to heading-based TOC + const entries = getVisibleHeadings(); + if (nav) { + nav.innerHTML = buildTocHtml(entries); + } + + // Set up scroll highlighting + observer = setupScrollHighlighting(component, entries); + } + + // Check initial visibility (hide for Operations tab, only for non-operations pages) + if (!operations || operations.length === 0) 
{ + updateTocVisibility(component); + } + + // Initial build + rebuild(); + + // Set up smooth scroll + setupSmoothScroll(component); + + // Watch for tab changes (only for non-operations pages) + if (!operations || operations.length === 0) { + watchTabChanges(component, rebuild); + } + + // Also rebuild on window resize (headings may change visibility) + let resizeTimeout: number; + window.addEventListener('resize', () => { + clearTimeout(resizeTimeout); + resizeTimeout = window.setTimeout(rebuild, 250); + }); +} diff --git a/assets/js/main.js b/assets/js/main.js index 826ad9a116..a3cfbaddc5 100644 --- a/assets/js/main.js +++ b/assets/js/main.js @@ -46,6 +46,10 @@ import SidebarSearch from './components/sidebar-search.js'; import { SidebarToggle } from './sidebar-toggle.js'; import Theme from './theme.js'; import ThemeSwitch from './theme-switch.js'; +import ApiNav from './components/api-nav.ts'; +import ApiScalar from './components/api-scalar.ts'; +import ApiTabs from './components/api-tabs.ts'; +import ApiToc from './components/api-toc.ts'; /** * Component Registry @@ -77,6 +81,10 @@ const componentRegistry = { 'sidebar-toggle': SidebarToggle, theme: Theme, 'theme-switch': ThemeSwitch, + 'api-nav': ApiNav, + 'api-scalar': ApiScalar, + 'api-tabs': ApiTabs, + 'api-toc': ApiToc, }; /** From fca01004c5292a694b7d9b19ff2a475b2eb39630 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:07:01 -0600 Subject: [PATCH 10/51] feat(api): Add article data files and sidebar navigation partial - Add YAML article data files for all InfluxDB products - Add sidebar-nav.html partial for API navigation rendering - Rename data directory from article-data to article_data for Hugo compatibility - Remove obsolete JSON articles file --- .../influxdb/cloud-v2/articles.json | 597 --------- .../influxdb/cloud-dedicated/articles.yml | 24 + .../influxdb/cloud-v2/articles.yml | 499 ++++++++ .../influxdb/clustered/articles.yml | 47 + .../influxdb/influxdb3_core/articles.yml | 
1094 +++++++++++++++++ .../influxdb3_enterprise/articles.yml | 246 ++++ .../article_data/influxdb/oss-v2/articles.yml | 757 ++++++++++++ layouts/partials/api/sidebar-nav.html | 244 ++++ 8 files changed, 2911 insertions(+), 597 deletions(-) delete mode 100644 data/article-data/influxdb/cloud-v2/articles.json create mode 100644 data/article_data/influxdb/cloud-dedicated/articles.yml create mode 100644 data/article_data/influxdb/cloud-v2/articles.yml create mode 100644 data/article_data/influxdb/clustered/articles.yml create mode 100644 data/article_data/influxdb/influxdb3_core/articles.yml create mode 100644 data/article_data/influxdb/influxdb3_enterprise/articles.yml create mode 100644 data/article_data/influxdb/oss-v2/articles.yml create mode 100644 layouts/partials/api/sidebar-nav.html diff --git a/data/article-data/influxdb/cloud-v2/articles.json b/data/article-data/influxdb/cloud-v2/articles.json deleted file mode 100644 index 2267c4303d..0000000000 --- a/data/article-data/influxdb/cloud-v2/articles.json +++ /dev/null @@ -1,597 +0,0 @@ -{ - "articles": [ - { - "path": "api-v2-authorizations", - "fields": { - "name": "/api/v2/authorizations", - "describes": [ - "/api/v2/authorizations", - "/api/v2/authorizations/{authID}" - ], - "title": "/api/v2/authorizations\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "authorizations" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml" - } - }, - { - "path": "api-v2-buckets", - "fields": { - "name": "/api/v2/buckets", - "describes": [ - "/api/v2/buckets", - "/api/v2/buckets/{bucketID}", - "/api/v2/buckets/{bucketID}/labels", - "/api/v2/buckets/{bucketID}/labels/{labelID}", - "/api/v2/buckets/{bucketID}/members", - "/api/v2/buckets/{bucketID}/members/{userID}", - "/api/v2/buckets/{bucketID}/owners", - "/api/v2/buckets/{bucketID}/owners/{userID}", - 
"/api/v2/buckets/{bucketID}/schema/measurements", - "/api/v2/buckets/{bucketID}/schema/measurements/{measurementID}" - ], - "title": "/api/v2/buckets\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "buckets" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml" - } - }, - { - "path": "api-v2-checks", - "fields": { - "name": "/api/v2/checks", - "describes": [ - "/api/v2/checks", - "/api/v2/checks/{checkID}", - "/api/v2/checks/{checkID}/labels", - "/api/v2/checks/{checkID}/labels/{labelID}", - "/api/v2/checks/{checkID}/query" - ], - "title": "/api/v2/checks\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "checks" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml" - } - }, - { - "path": "api-v2-dashboards", - "fields": { - "name": "/api/v2/dashboards", - "describes": [ - "/api/v2/dashboards", - "/api/v2/dashboards/{dashboardID}", - "/api/v2/dashboards/{dashboardID}/cells", - "/api/v2/dashboards/{dashboardID}/cells/{cellID}", - "/api/v2/dashboards/{dashboardID}/cells/{cellID}/view", - "/api/v2/dashboards/{dashboardID}/labels", - "/api/v2/dashboards/{dashboardID}/labels/{labelID}", - "/api/v2/dashboards/{dashboardID}/members", - "/api/v2/dashboards/{dashboardID}/members/{userID}", - "/api/v2/dashboards/{dashboardID}/owners", - "/api/v2/dashboards/{dashboardID}/owners/{userID}" - ], - "title": "/api/v2/dashboards\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "dashboards" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml" - } - }, - { - "path": "api-v2-dbrps", - "fields": { - "name": "/api/v2/dbrps", - "describes": [ - "/api/v2/dbrps", - "/api/v2/dbrps/{dbrpID}" - ], - "title": "/api/v2/dbrps\nInfluxDB Cloud API 
Service", - "tags": [ - "api-v2", - "dbrps" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml" - } - }, - { - "path": "api-v2-delete", - "fields": { - "name": "/api/v2/delete", - "describes": [ - "/api/v2/delete" - ], - "title": "/api/v2/delete\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "delete" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml" - } - }, - { - "path": "api-v2-flags", - "fields": { - "name": "/api/v2/flags", - "describes": [ - "/api/v2/flags" - ], - "title": "/api/v2/flags\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "flags" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml" - } - }, - { - "path": "api-v2-labels", - "fields": { - "name": "/api/v2/labels", - "describes": [ - "/api/v2/labels", - "/api/v2/labels/{labelID}" - ], - "title": "/api/v2/labels\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "labels" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml" - } - }, - { - "path": "api-v2-maps", - "fields": { - "name": "/api/v2/maps", - "describes": [ - "/api/v2/maps/mapToken" - ], - "title": "/api/v2/maps\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "maps" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml" - } - }, - { - "path": "api-v2-me", - "fields": { - "name": "/api/v2/me", - "describes": [ - "/api/v2/me", - "/api/v2/me/password" - ], - "title": "/api/v2/me\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "me" - ], - "source": 
"static/openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml" - } - }, - { - "path": "api-v2-notificationEndpoints", - "fields": { - "name": "/api/v2/notificationEndpoints", - "describes": [ - "/api/v2/notificationEndpoints", - "/api/v2/notificationEndpoints/{endpointID}", - "/api/v2/notificationEndpoints/{endpointID}/labels", - "/api/v2/notificationEndpoints/{endpointID}/labels/{labelID}" - ], - "title": "/api/v2/notificationEndpoints\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "notificationEndpoints" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml" - } - }, - { - "path": "api-v2-notificationRules", - "fields": { - "name": "/api/v2/notificationRules", - "describes": [ - "/api/v2/notificationRules", - "/api/v2/notificationRules/{ruleID}", - "/api/v2/notificationRules/{ruleID}/labels", - "/api/v2/notificationRules/{ruleID}/labels/{labelID}", - "/api/v2/notificationRules/{ruleID}/query" - ], - "title": "/api/v2/notificationRules\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "notificationRules" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml" - } - }, - { - "path": "api-v2-orgs", - "fields": { - "name": "/api/v2/orgs", - "describes": [ - "/api/v2/orgs", - "/api/v2/orgs/{orgID}", - "/api/v2/orgs/{orgID}/limits", - "/api/v2/orgs/{orgID}/members", - "/api/v2/orgs/{orgID}/members/{userID}", - "/api/v2/orgs/{orgID}/owners", - "/api/v2/orgs/{orgID}/owners/{userID}", - "/api/v2/orgs/{orgID}/secrets", - "/api/v2/orgs/{orgID}/secrets/delete", - "/api/v2/orgs/{orgID}/secrets/{secretID}", - "/api/v2/orgs/{orgID}/usage" - ], - "title": "/api/v2/orgs\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "orgs" - ], - 
"source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml" - } - }, - { - "path": "api-v2-query", - "fields": { - "name": "/api/v2/query", - "describes": [ - "/api/v2/query", - "/api/v2/query/analyze", - "/api/v2/query/ast", - "/api/v2/query/suggestions", - "/api/v2/query/suggestions/{name}" - ], - "title": "/api/v2/query\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "query" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml" - } - }, - { - "path": "api-v2-resources", - "fields": { - "name": "/api/v2/resources", - "describes": [ - "/api/v2/resources" - ], - "title": "/api/v2/resources\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "resources" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml" - } - }, - { - "path": "api-v2-scripts", - "fields": { - "name": "/api/v2/scripts", - "describes": [ - "/api/v2/scripts", - "/api/v2/scripts/{scriptID}", - "/api/v2/scripts/{scriptID}/invoke", - "/api/v2/scripts/{scriptID}/params" - ], - "title": "/api/v2/scripts\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "scripts" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml" - } - }, - { - "path": "api-v2-setup", - "fields": { - "name": "/api/v2/setup", - "describes": [ - "/api/v2/setup", - "/api/v2/setup/user" - ], - "title": "/api/v2/setup\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "setup" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml" - } - }, - { - "path": "api-v2-signin", - "fields": { - "name": "/api/v2/signin", - 
"describes": [ - "/api/v2/signin" - ], - "title": "/api/v2/signin\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "signin" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml" - } - }, - { - "path": "api-v2-signout", - "fields": { - "name": "/api/v2/signout", - "describes": [ - "/api/v2/signout" - ], - "title": "/api/v2/signout\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "signout" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml" - } - }, - { - "path": "api-v2-stacks", - "fields": { - "name": "/api/v2/stacks", - "describes": [ - "/api/v2/stacks", - "/api/v2/stacks/{stack_id}", - "/api/v2/stacks/{stack_id}/uninstall" - ], - "title": "/api/v2/stacks\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "stacks" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml" - } - }, - { - "path": "api-v2-tasks", - "fields": { - "name": "/api/v2/tasks", - "describes": [ - "/api/v2/tasks", - "/api/v2/tasks/{taskID}", - "/api/v2/tasks/{taskID}/labels", - "/api/v2/tasks/{taskID}/labels/{labelID}", - "/api/v2/tasks/{taskID}/logs", - "/api/v2/tasks/{taskID}/members", - "/api/v2/tasks/{taskID}/members/{userID}", - "/api/v2/tasks/{taskID}/owners", - "/api/v2/tasks/{taskID}/owners/{userID}", - "/api/v2/tasks/{taskID}/runs", - "/api/v2/tasks/{taskID}/runs/{runID}", - "/api/v2/tasks/{taskID}/runs/{runID}/logs", - "/api/v2/tasks/{taskID}/runs/{runID}/retry" - ], - "title": "/api/v2/tasks\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "tasks" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml" - } - }, - { - "path": "api-v2-telegraf", - 
"fields": { - "name": "/api/v2/telegraf", - "describes": [ - "/api/v2/telegraf/plugins" - ], - "title": "/api/v2/telegraf\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "telegraf" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml" - } - }, - { - "path": "api-v2-telegrafs", - "fields": { - "name": "/api/v2/telegrafs", - "describes": [ - "/api/v2/telegrafs", - "/api/v2/telegrafs/{telegrafID}", - "/api/v2/telegrafs/{telegrafID}/labels", - "/api/v2/telegrafs/{telegrafID}/labels/{labelID}", - "/api/v2/telegrafs/{telegrafID}/members", - "/api/v2/telegrafs/{telegrafID}/members/{userID}", - "/api/v2/telegrafs/{telegrafID}/owners", - "/api/v2/telegrafs/{telegrafID}/owners/{userID}" - ], - "title": "/api/v2/telegrafs\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "telegrafs" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml" - } - }, - { - "path": "api-v2-templates", - "fields": { - "name": "/api/v2/templates", - "describes": [ - "/api/v2/templates/apply", - "/api/v2/templates/export" - ], - "title": "/api/v2/templates\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "templates" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml" - } - }, - { - "path": "api-v2-users", - "fields": { - "name": "/api/v2/users", - "describes": [ - "/api/v2/users", - "/api/v2/users/{userID}", - "/api/v2/users/{userID}/password" - ], - "title": "/api/v2/users\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "users" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml" - } - }, - { - "path": "api-v2-variables", - "fields": { - 
"name": "/api/v2/variables", - "describes": [ - "/api/v2/variables", - "/api/v2/variables/{variableID}", - "/api/v2/variables/{variableID}/labels", - "/api/v2/variables/{variableID}/labels/{labelID}" - ], - "title": "/api/v2/variables\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "variables" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml" - } - }, - { - "path": "api-v2-write", - "fields": { - "name": "/api/v2/write", - "describes": [ - "/api/v2/write" - ], - "title": "/api/v2/write\nInfluxDB Cloud API Service", - "tags": [ - "api-v2", - "write" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml" - } - }, - { - "path": "api-v2", - "fields": { - "name": "/api/v2", - "describes": [ - "/api/v2" - ], - "title": "/api/v2\nInfluxDB Cloud API Service", - "tags": [ - "api", - "v2" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml" - } - }, - { - "path": "legacy-authorizations", - "fields": { - "name": "/legacy/authorizations", - "describes": [ - "/legacy/authorizations", - "/legacy/authorizations/{authID}", - "/legacy/authorizations/{authID}/password" - ], - "title": "/legacy/authorizations\nInfluxDB Cloud API Service", - "tags": [ - "legacy", - "authorizations" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml" - } - }, - { - "path": "ping", - "fields": { - "name": "/ping", - "describes": [ - "/ping" - ], - "title": "/ping\nInfluxDB Cloud API Service", - "tags": [ - "", - "ping" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-ping.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-ping.yaml" - } - }, 
- { - "path": "query", - "fields": { - "name": "/query", - "describes": [ - "/query" - ], - "title": "/query\nInfluxDB Cloud API Service", - "tags": [ - "", - "query" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-query.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-query.yaml" - } - }, - { - "path": "write", - "fields": { - "name": "/write", - "describes": [ - "/write" - ], - "title": "/write\nInfluxDB Cloud API Service", - "tags": [ - "", - "write" - ], - "source": "static/openapi/influxdb-cloud-v2/paths/ref-write.yaml", - "staticFilePath": "/openapi/influxdb-cloud-v2/paths/ref-write.yaml" - } - } - ] -} \ No newline at end of file diff --git a/data/article_data/influxdb/cloud-dedicated/articles.yml b/data/article_data/influxdb/cloud-dedicated/articles.yml new file mode 100644 index 0000000000..e655739bf6 --- /dev/null +++ b/data/article_data/influxdb/cloud-dedicated/articles.yml @@ -0,0 +1,24 @@ +articles: + - path: api/accounts/{accountId}/clusters + fields: + name: /accounts/{accountId}/clusters + describes: + - /accounts/{accountId}/clusters/{clusterId}/databases + - /accounts/{accountId}/clusters/{clusterId}/databases/{databaseName} + - >- + /accounts/{accountId}/clusters/{clusterId}/databases/{databaseName}/tables + - /accounts/{accountId}/clusters/{clusterId}/tokens + - /accounts/{accountId}/clusters/{clusterId}/tokens/{tokenId} + menuName: /accounts/{accountId}/clusters + title: /accounts/{accountId}/clusters + tags: + - accounts-{accountId} + - clusters + apiTags: + - Databases + - Tables + - Database tokens + source: >- + static/openapi/influxdb-cloud-dedicated/paths/openapi-accounts-{accountId}-clusters.yaml + staticFilePath: >- + /openapi/influxdb-cloud-dedicated/paths/openapi-accounts-{accountId}-clusters.yaml diff --git a/data/article_data/influxdb/cloud-v2/articles.yml b/data/article_data/influxdb/cloud-v2/articles.yml new file mode 100644 index 0000000000..0f188ea905 --- /dev/null +++ 
b/data/article_data/influxdb/cloud-v2/articles.yml @@ -0,0 +1,499 @@ +articles: + - path: api/v2/authorizations + fields: + name: /api/v2/authorizations + describes: + - /api/v2/authorizations + - /api/v2/authorizations/{authID} + title: |- + /api/v2/authorizations + InfluxDB Cloud API Service + tags: + - api-v2 + - authorizations + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-authorizations.yaml + - path: api/v2/buckets + fields: + name: /api/v2/buckets + describes: + - /api/v2/buckets + - /api/v2/buckets/{bucketID} + - /api/v2/buckets/{bucketID}/labels + - /api/v2/buckets/{bucketID}/labels/{labelID} + - /api/v2/buckets/{bucketID}/members + - /api/v2/buckets/{bucketID}/members/{userID} + - /api/v2/buckets/{bucketID}/owners + - /api/v2/buckets/{bucketID}/owners/{userID} + - /api/v2/buckets/{bucketID}/schema/measurements + - /api/v2/buckets/{bucketID}/schema/measurements/{measurementID} + title: |- + /api/v2/buckets + InfluxDB Cloud API Service + tags: + - api-v2 + - buckets + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-buckets.yaml + - path: api/v2/checks + fields: + name: /api/v2/checks + describes: + - /api/v2/checks + - /api/v2/checks/{checkID} + - /api/v2/checks/{checkID}/labels + - /api/v2/checks/{checkID}/labels/{labelID} + - /api/v2/checks/{checkID}/query + title: |- + /api/v2/checks + InfluxDB Cloud API Service + tags: + - api-v2 + - checks + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-checks.yaml + - path: api/v2/dashboards + fields: + name: /api/v2/dashboards + describes: + - /api/v2/dashboards + - /api/v2/dashboards/{dashboardID} + - /api/v2/dashboards/{dashboardID}/cells + - /api/v2/dashboards/{dashboardID}/cells/{cellID} + - /api/v2/dashboards/{dashboardID}/cells/{cellID}/view + - 
/api/v2/dashboards/{dashboardID}/labels + - /api/v2/dashboards/{dashboardID}/labels/{labelID} + - /api/v2/dashboards/{dashboardID}/members + - /api/v2/dashboards/{dashboardID}/members/{userID} + - /api/v2/dashboards/{dashboardID}/owners + - /api/v2/dashboards/{dashboardID}/owners/{userID} + title: |- + /api/v2/dashboards + InfluxDB Cloud API Service + tags: + - api-v2 + - dashboards + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-dashboards.yaml + - path: api/v2/dbrps + fields: + name: /api/v2/dbrps + describes: + - /api/v2/dbrps + - /api/v2/dbrps/{dbrpID} + title: |- + /api/v2/dbrps + InfluxDB Cloud API Service + tags: + - api-v2 + - dbrps + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-dbrps.yaml + - path: api/v2/delete + fields: + name: /api/v2/delete + describes: + - /api/v2/delete + title: |- + /api/v2/delete + InfluxDB Cloud API Service + tags: + - api-v2 + - delete + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-delete.yaml + - path: api/v2/flags + fields: + name: /api/v2/flags + describes: + - /api/v2/flags + title: |- + /api/v2/flags + InfluxDB Cloud API Service + tags: + - api-v2 + - flags + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-flags.yaml + - path: api/v2/labels + fields: + name: /api/v2/labels + describes: + - /api/v2/labels + - /api/v2/labels/{labelID} + title: |- + /api/v2/labels + InfluxDB Cloud API Service + tags: + - api-v2 + - labels + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-labels.yaml + - path: api/v2/maps + fields: + name: /api/v2/maps + describes: + - /api/v2/maps/mapToken + title: |- + /api/v2/maps + 
InfluxDB Cloud API Service + tags: + - api-v2 + - maps + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-maps.yaml + - path: api/v2/me + fields: + name: /api/v2/me + describes: + - /api/v2/me + - /api/v2/me/password + title: |- + /api/v2/me + InfluxDB Cloud API Service + tags: + - api-v2 + - me + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-me.yaml + - path: api/v2/notificationEndpoints + fields: + name: /api/v2/notificationEndpoints + describes: + - /api/v2/notificationEndpoints + - /api/v2/notificationEndpoints/{endpointID} + - /api/v2/notificationEndpoints/{endpointID}/labels + - /api/v2/notificationEndpoints/{endpointID}/labels/{labelID} + title: |- + /api/v2/notificationEndpoints + InfluxDB Cloud API Service + tags: + - api-v2 + - notificationEndpoints + source: >- + static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationEndpoints.yaml + - path: api/v2/notificationRules + fields: + name: /api/v2/notificationRules + describes: + - /api/v2/notificationRules + - /api/v2/notificationRules/{ruleID} + - /api/v2/notificationRules/{ruleID}/labels + - /api/v2/notificationRules/{ruleID}/labels/{labelID} + - /api/v2/notificationRules/{ruleID}/query + title: |- + /api/v2/notificationRules + InfluxDB Cloud API Service + tags: + - api-v2 + - notificationRules + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-notificationRules.yaml + - path: api/v2/orgs + fields: + name: /api/v2/orgs + describes: + - /api/v2/orgs + - /api/v2/orgs/{orgID} + - /api/v2/orgs/{orgID}/limits + - /api/v2/orgs/{orgID}/members + - /api/v2/orgs/{orgID}/members/{userID} + - /api/v2/orgs/{orgID}/owners + - /api/v2/orgs/{orgID}/owners/{userID} + - 
/api/v2/orgs/{orgID}/secrets + - /api/v2/orgs/{orgID}/secrets/delete + - /api/v2/orgs/{orgID}/secrets/{secretID} + - /api/v2/orgs/{orgID}/usage + title: |- + /api/v2/orgs + InfluxDB Cloud API Service + tags: + - api-v2 + - orgs + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-orgs.yaml + - path: api/v2/query + fields: + name: /api/v2/query + describes: + - /api/v2/query + - /api/v2/query/analyze + - /api/v2/query/ast + - /api/v2/query/suggestions + - /api/v2/query/suggestions/{name} + title: |- + /api/v2/query + InfluxDB Cloud API Service + tags: + - api-v2 + - query + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-query.yaml + - path: api/v2/resources + fields: + name: /api/v2/resources + describes: + - /api/v2/resources + title: |- + /api/v2/resources + InfluxDB Cloud API Service + tags: + - api-v2 + - resources + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-resources.yaml + - path: api/v2/scripts + fields: + name: /api/v2/scripts + describes: + - /api/v2/scripts + - /api/v2/scripts/{scriptID} + - /api/v2/scripts/{scriptID}/invoke + - /api/v2/scripts/{scriptID}/params + title: |- + /api/v2/scripts + InfluxDB Cloud API Service + tags: + - api-v2 + - scripts + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-scripts.yaml + - path: api/v2/setup + fields: + name: /api/v2/setup + describes: + - /api/v2/setup + - /api/v2/setup/user + title: |- + /api/v2/setup + InfluxDB Cloud API Service + tags: + - api-v2 + - setup + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-setup.yaml + - path: api/v2/signin + fields: + name: /api/v2/signin + describes: + - 
/api/v2/signin + title: |- + /api/v2/signin + InfluxDB Cloud API Service + tags: + - api-v2 + - signin + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-signin.yaml + - path: api/v2/signout + fields: + name: /api/v2/signout + describes: + - /api/v2/signout + title: |- + /api/v2/signout + InfluxDB Cloud API Service + tags: + - api-v2 + - signout + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-signout.yaml + - path: api/v2/stacks + fields: + name: /api/v2/stacks + describes: + - /api/v2/stacks + - /api/v2/stacks/{stack_id} + - /api/v2/stacks/{stack_id}/uninstall + title: |- + /api/v2/stacks + InfluxDB Cloud API Service + tags: + - api-v2 + - stacks + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-stacks.yaml + - path: api/v2/tasks + fields: + name: /api/v2/tasks + describes: + - /api/v2/tasks + - /api/v2/tasks/{taskID} + - /api/v2/tasks/{taskID}/labels + - /api/v2/tasks/{taskID}/labels/{labelID} + - /api/v2/tasks/{taskID}/logs + - /api/v2/tasks/{taskID}/members + - /api/v2/tasks/{taskID}/members/{userID} + - /api/v2/tasks/{taskID}/owners + - /api/v2/tasks/{taskID}/owners/{userID} + - /api/v2/tasks/{taskID}/runs + - /api/v2/tasks/{taskID}/runs/{runID} + - /api/v2/tasks/{taskID}/runs/{runID}/logs + - /api/v2/tasks/{taskID}/runs/{runID}/retry + title: |- + /api/v2/tasks + InfluxDB Cloud API Service + tags: + - api-v2 + - tasks + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-tasks.yaml + - path: api/v2/telegraf + fields: + name: /api/v2/telegraf + describes: + - /api/v2/telegraf/plugins + title: |- + /api/v2/telegraf + InfluxDB Cloud API Service + tags: + - api-v2 + - telegraf + source: 
static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-telegraf.yaml + - path: api/v2/telegrafs + fields: + name: /api/v2/telegrafs + describes: + - /api/v2/telegrafs + - /api/v2/telegrafs/{telegrafID} + - /api/v2/telegrafs/{telegrafID}/labels + - /api/v2/telegrafs/{telegrafID}/labels/{labelID} + - /api/v2/telegrafs/{telegrafID}/members + - /api/v2/telegrafs/{telegrafID}/members/{userID} + - /api/v2/telegrafs/{telegrafID}/owners + - /api/v2/telegrafs/{telegrafID}/owners/{userID} + title: |- + /api/v2/telegrafs + InfluxDB Cloud API Service + tags: + - api-v2 + - telegrafs + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-telegrafs.yaml + - path: api/v2/templates + fields: + name: /api/v2/templates + describes: + - /api/v2/templates/apply + - /api/v2/templates/export + title: |- + /api/v2/templates + InfluxDB Cloud API Service + tags: + - api-v2 + - templates + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-templates.yaml + - path: api/v2/users + fields: + name: /api/v2/users + describes: + - /api/v2/users + - /api/v2/users/{userID} + - /api/v2/users/{userID}/password + title: |- + /api/v2/users + InfluxDB Cloud API Service + tags: + - api-v2 + - users + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-users.yaml + - path: api/v2/variables + fields: + name: /api/v2/variables + describes: + - /api/v2/variables + - /api/v2/variables/{variableID} + - /api/v2/variables/{variableID}/labels + - /api/v2/variables/{variableID}/labels/{labelID} + title: |- + /api/v2/variables + InfluxDB Cloud API Service + tags: + - api-v2 + - variables + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml + staticFilePath: 
/openapi/influxdb-cloud-v2/paths/ref-api-v2-variables.yaml + - path: api/v2/write + fields: + name: /api/v2/write + describes: + - /api/v2/write + title: |- + /api/v2/write + InfluxDB Cloud API Service + tags: + - api-v2 + - write + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2-write.yaml + - path: api/v2 + fields: + name: /api/v2 + describes: + - /api/v2 + title: |- + /api/v2 + InfluxDB Cloud API Service + tags: + - api + - v2 + source: static/openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-api-v2.yaml + - path: legacy/authorizations + fields: + name: /legacy/authorizations + describes: + - /legacy/authorizations + - /legacy/authorizations/{authID} + - /legacy/authorizations/{authID}/password + title: |- + /legacy/authorizations + InfluxDB Cloud API Service + tags: + - legacy + - authorizations + source: static/openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-legacy-authorizations.yaml + - path: ping + fields: + name: /ping + describes: + - /ping + title: |- + /ping + InfluxDB Cloud API Service + tags: + - '' + - ping + source: static/openapi/influxdb-cloud-v2/paths/ref-ping.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-ping.yaml + - path: query + fields: + name: /query + describes: + - /query + title: |- + /query + InfluxDB Cloud API Service + tags: + - '' + - query + source: static/openapi/influxdb-cloud-v2/paths/ref-query.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-query.yaml + - path: write + fields: + name: /write + describes: + - /write + title: |- + /write + InfluxDB Cloud API Service + tags: + - '' + - write + source: static/openapi/influxdb-cloud-v2/paths/ref-write.yaml + staticFilePath: /openapi/influxdb-cloud-v2/paths/ref-write.yaml diff --git a/data/article_data/influxdb/clustered/articles.yml 
b/data/article_data/influxdb/clustered/articles.yml new file mode 100644 index 0000000000..b2d6347131 --- /dev/null +++ b/data/article_data/influxdb/clustered/articles.yml @@ -0,0 +1,47 @@ +articles: + - path: api/databases/{databaseName}/tables + fields: + name: /databases/{databaseName}/tables + describes: + - /databases/{databaseName}/tables + menuName: /databases/{databaseName}/tables + title: /databases/{databaseName}/tables + tags: + - databases-{databaseName} + - tables + apiTags: + - Tables + source: >- + static/openapi/influxdb-clustered/paths/openapi-databases-{databaseName}-tables.yaml + staticFilePath: >- + /openapi/influxdb-clustered/paths/openapi-databases-{databaseName}-tables.yaml + - path: api/databases + fields: + name: /databases + describes: + - /databases + - /databases/{databaseName} + menuName: /databases + title: /databases + tags: + - '' + - databases + apiTags: + - Databases + source: static/openapi/influxdb-clustered/paths/openapi-databases.yaml + staticFilePath: /openapi/influxdb-clustered/paths/openapi-databases.yaml + - path: api/tokens + fields: + name: /tokens + describes: + - /tokens + - /tokens/{tokenId} + menuName: /tokens + title: /tokens + tags: + - '' + - tokens + apiTags: + - Database tokens + source: static/openapi/influxdb-clustered/paths/openapi-tokens.yaml + staticFilePath: /openapi/influxdb-clustered/paths/openapi-tokens.yaml diff --git a/data/article_data/influxdb/influxdb3_core/articles.yml b/data/article_data/influxdb/influxdb3_core/articles.yml new file mode 100644 index 0000000000..75f32fb05c --- /dev/null +++ b/data/article_data/influxdb/influxdb3_core/articles.yml @@ -0,0 +1,1094 @@ +articles: + - path: api/authentication + fields: + name: Authentication + describes: + - /api/v3/configure/token/admin + - /api/v3/configure/token/admin/regenerate + - /api/v3/configure/token + - /api/v3/configure/token/named_admin + title: Authentication + description: > + Depending on your workflow, use one of the following schemes 
to + authenticate to the InfluxDB 3 API: + + + | Authentication scheme | Works with | + + |:----------------------|:-----------| + + | Bearer authentication | All endpoints | + + | Token authentication | v1, v2 endpoints | + + | Basic authentication | v1 endpoints | + + | Querystring authentication | v1 endpoints | + + + See the **Security Schemes** section below for details on each + authentication method. + tag: Authentication + isConceptual: true + menuGroup: Concepts + operations: + - operationId: PostCreateAdminToken + method: POST + path: /api/v3/configure/token/admin + summary: Create admin token + tags: + - Authentication + - Token + - operationId: PostRegenerateAdminToken + method: POST + path: /api/v3/configure/token/admin/regenerate + summary: Regenerate admin token + tags: + - Authentication + - Token + - operationId: DeleteToken + method: DELETE + path: /api/v3/configure/token + summary: Delete token + tags: + - Authentication + - Token + - operationId: PostCreateNamedAdminToken + method: POST + path: /api/v3/configure/token/named_admin + summary: Create named admin token + tags: + - Authentication + - Token + tagDescription: > + Depending on your workflow, use one of the following schemes to + authenticate to the InfluxDB 3 API: + + + | Authentication scheme | Works with | + + |:----------------------|:-----------| + + | Bearer authentication | All endpoints | + + | Token authentication | v1, v2 endpoints | + + | Basic authentication | v1 endpoints | + + | Querystring authentication | v1 endpoints | + + + See the **Security Schemes** section below for details on each + authentication method. 
+ source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-authentication.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-authentication.yaml + - path: api/cache-data + fields: + name: Cache data + describes: + - /api/v3/configure/distinct_cache + - /api/v3/configure/last_cache + title: Cache data + description: > + Manage the in-memory cache. + + + #### Distinct Value Cache + + + The Distinct Value Cache (DVC) lets you cache distinct + + values of one or more columns in a table, improving the performance of + + queries that return distinct tag and field values. + + + The DVC is an in-memory cache that stores distinct values for specific + columns + + in a table. When you create an DVC, you can specify what columns' + distinct + + values to cache, the maximum number of distinct value combinations to + cache, and + + the maximum age of cached values. A DVC is associated with a table, + which can + + have multiple DVCs. + + + #### Last value cache + + + The Last Value Cache (LVC) lets you cache the most recent + + values for specific fields in a table, improving the performance of + queries that + + return the most recent value of a field for specific series or the last + N values + + of a field. + + + The LVC is an in-memory cache that stores the last N number of values + for + + specific fields of series in a table. When you create an LVC, you can + specify + + what fields to cache, what tags to use to identify each series, and the + + number of values to cache for each unique series. + + An LVC is associated with a table, which can have multiple LVCs. 
+ + + #### Related guides + + + - [Manage the Distinct Value + Cache](/influxdb3/core/admin/distinct-value-cache/) + + - [Manage the Last Value Cache](/influxdb3/core/admin/last-value-cache/) + tag: Cache data + isConceptual: false + menuGroup: Data Operations + operations: + - operationId: PostConfigureDistinctCache + method: POST + path: /api/v3/configure/distinct_cache + summary: Create distinct cache + tags: + - Cache data + - Table + - operationId: DeleteConfigureDistinctCache + method: DELETE + path: /api/v3/configure/distinct_cache + summary: Delete distinct cache + tags: + - Cache data + - Table + - operationId: PostConfigureLastCache + method: POST + path: /api/v3/configure/last_cache + summary: Create last cache + tags: + - Cache data + - Table + - operationId: DeleteConfigureLastCache + method: DELETE + path: /api/v3/configure/last_cache + summary: Delete last cache + tags: + - Cache data + - Table + tagDescription: > + Manage the in-memory cache. + + + #### Distinct Value Cache + + + The Distinct Value Cache (DVC) lets you cache distinct + + values of one or more columns in a table, improving the performance of + + queries that return distinct tag and field values. + + + The DVC is an in-memory cache that stores distinct values for specific + columns + + in a table. When you create an DVC, you can specify what columns' + distinct + + values to cache, the maximum number of distinct value combinations to + cache, and + + the maximum age of cached values. A DVC is associated with a table, + which can + + have multiple DVCs. + + + #### Last value cache + + + The Last Value Cache (LVC) lets you cache the most recent + + values for specific fields in a table, improving the performance of + queries that + + return the most recent value of a field for specific series or the last + N values + + of a field. + + + The LVC is an in-memory cache that stores the last N number of values + for + + specific fields of series in a table. 
When you create an LVC, you can + specify + + what fields to cache, what tags to use to identify each series, and the + + number of values to cache for each unique series. + + An LVC is associated with a table, which can have multiple LVCs. + + + #### Related guides + + + - [Manage the Distinct Value + Cache](/influxdb3/core/admin/distinct-value-cache/) + + - [Manage the Last Value Cache](/influxdb3/core/admin/last-value-cache/) + source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-cache-data.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-cache-data.yaml + - path: api/compatibility-endpoints + fields: + name: Compatibility endpoints + describes: + - /write + - /api/v2/write + - /query + - /api/v1/health + title: Compatibility endpoints + description: > + InfluxDB 3 provides compatibility endpoints for InfluxDB 1.x and + InfluxDB 2.x workloads and clients. + + + ### Write data using v1- or v2-compatible endpoints + + + - [`/api/v2/write` endpoint](#operation/PostV2Write) + for InfluxDB v2 clients and when you bring existing InfluxDB v2 write workloads to InfluxDB 3. + - [`/write` endpoint](#operation/PostV1Write) for InfluxDB v1 clients + and when you bring existing InfluxDB v1 write workloads to InfluxDB 3. + + + For new workloads, use the [`/api/v3/write_lp` + endpoint](#operation/PostWriteLP). + + + All endpoints accept the same line protocol format. + + + ### Query data + + + Use the HTTP [`/query`](#operation/GetV1ExecuteQuery) endpoint for + InfluxDB v1 clients and v1 query workloads using InfluxQL. + + + For new workloads, use one of the following: + + + - HTTP [`/api/v3/query_sql` endpoint](#operation/GetExecuteQuerySQL) for + new query workloads using SQL. + + - HTTP [`/api/v3/query_influxql` + endpoint](#operation/GetExecuteInfluxQLQuery) for new query workloads + using InfluxQL. + + - Flight SQL and InfluxDB 3 _Flight+gRPC_ APIs for querying with SQL or + InfluxQL. 
For more information about using Flight APIs, see [InfluxDB 3 + client + libraries](https://github.com/InfluxCommunity?q=influxdb3&type=public&language=&sort=). + + + ### Server information + + + Server information endpoints such as `/health` and `metrics` are + compatible with InfluxDB 1.x and InfluxDB 2.x clients. + tag: Compatibility endpoints + isConceptual: false + menuGroup: Compatibility + operations: + - operationId: PostV1Write + method: POST + path: /write + summary: Write line protocol (v1-compatible) + tags: + - Compatibility endpoints + - Write data + - operationId: PostV2Write + method: POST + path: /api/v2/write + summary: Write line protocol (v2-compatible) + tags: + - Compatibility endpoints + - Write data + - operationId: GetV1ExecuteQuery + method: GET + path: /query + summary: Execute InfluxQL query (v1-compatible) + tags: + - Query data + - Compatibility endpoints + - operationId: PostExecuteV1Query + method: POST + path: /query + summary: Execute InfluxQL query (v1-compatible) + tags: + - Query data + - Compatibility endpoints + - operationId: GetHealthV1 + method: GET + path: /api/v1/health + summary: Health check (v1) + tags: + - Server information + - Compatibility endpoints + tagDescription: > + InfluxDB 3 provides compatibility endpoints for InfluxDB 1.x and + InfluxDB 2.x workloads and clients. + + + ### Write data using v1- or v2-compatible endpoints + + + - [`/api/v2/write` endpoint](#operation/PostV2Write) + for InfluxDB v2 clients and when you bring existing InfluxDB v2 write workloads to InfluxDB 3. + - [`/write` endpoint](#operation/PostV1Write) for InfluxDB v1 clients + and when you bring existing InfluxDB v1 write workloads to InfluxDB 3. + + + For new workloads, use the [`/api/v3/write_lp` + endpoint](#operation/PostWriteLP). + + + All endpoints accept the same line protocol format. 
+ + + ### Query data + + + Use the HTTP [`/query`](#operation/GetV1ExecuteQuery) endpoint for + InfluxDB v1 clients and v1 query workloads using InfluxQL. + + + For new workloads, use one of the following: + + + - HTTP [`/api/v3/query_sql` endpoint](#operation/GetExecuteQuerySQL) for + new query workloads using SQL. + + - HTTP [`/api/v3/query_influxql` + endpoint](#operation/GetExecuteInfluxQLQuery) for new query workloads + using InfluxQL. + + - Flight SQL and InfluxDB 3 _Flight+gRPC_ APIs for querying with SQL or + InfluxQL. For more information about using Flight APIs, see [InfluxDB 3 + client + libraries](https://github.com/InfluxCommunity?q=influxdb3&type=public&language=&sort=). + + + ### Server information + + + Server information endpoints such as `/health` and `metrics` are + compatible with InfluxDB 1.x and InfluxDB 2.x clients. + source: >- + static/openapi/influxdb-influxdb3-core/tags/tags/ref-compatibility-endpoints.yaml + staticFilePath: >- + /openapi/influxdb-influxdb3-core/tags/tags/ref-compatibility-endpoints.yaml + - path: api/database + fields: + name: Database + describes: + - /api/v3/configure/database + - /api/v3/configure/database/retention_period + title: Database + description: Manage databases + tag: Database + isConceptual: false + menuGroup: Administration + operations: + - operationId: GetConfigureDatabase + method: GET + path: /api/v3/configure/database + summary: List databases + tags: + - Database + - operationId: PostConfigureDatabase + method: POST + path: /api/v3/configure/database + summary: Create a database + tags: + - Database + - operationId: DeleteConfigureDatabase + method: DELETE + path: /api/v3/configure/database + summary: Delete a database + tags: + - Database + - operationId: DeleteDatabaseRetentionPeriod + method: DELETE + path: /api/v3/configure/database/retention_period + summary: Remove database retention period + tags: + - Database + tagDescription: Manage databases + source: 
static/openapi/influxdb-influxdb3-core/tags/tags/ref-database.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-database.yaml + - path: api/headers-and-parameters + fields: + name: Headers and parameters + describes: [] + title: Headers and parameters + description: > + Most InfluxDB API endpoints require parameters in the request--for + example, specifying the database to use. + + + ### Common parameters + + + The following table shows common parameters used by many InfluxDB API + endpoints. + + Many endpoints may require other parameters in the query string or in + the + + request body that perform functions specific to those endpoints. + + + | Query parameter | Value type | + Description | + + |:------------------------ |:--------------------- + |:-------------------------------------------| + + | `db` | string | The database name | + + + InfluxDB HTTP API endpoints use standard HTTP request and response + headers. + + The following table shows common headers used by many InfluxDB API + endpoints. + + Some endpoints may use other headers that perform functions more + specific to those endpoints--for example, + + the write endpoints accept the `Content-Encoding` header to indicate + that line protocol is compressed in the request body. + + + | Header | Value type | + Description | + + |:------------------------ |:--------------------- + |:-------------------------------------------| + + | `Accept` | string | The content type + that the client can understand. | + + | `Authorization` | string | The authorization + scheme and credential. | + + | `Content-Length` | integer | The size of the + entity-body, in bytes. | + + | `Content-Type` | string | The format of the + data in the request body. | + tag: Headers and parameters + isConceptual: true + menuGroup: Concepts + operations: [] + tagDescription: > + Most InfluxDB API endpoints require parameters in the request--for + example, specifying the database to use. 
+ + + ### Common parameters + + + The following table shows common parameters used by many InfluxDB API + endpoints. + + Many endpoints may require other parameters in the query string or in + the + + request body that perform functions specific to those endpoints. + + + | Query parameter | Value type | + Description | + + |:------------------------ |:--------------------- + |:-------------------------------------------| + + | `db` | string | The database name | + + + InfluxDB HTTP API endpoints use standard HTTP request and response + headers. + + The following table shows common headers used by many InfluxDB API + endpoints. + + Some endpoints may use other headers that perform functions more + specific to those endpoints--for example, + + the write endpoints accept the `Content-Encoding` header to indicate + that line protocol is compressed in the request body. + + + | Header | Value type | + Description | + + |:------------------------ |:--------------------- + |:-------------------------------------------| + + | `Accept` | string | The content type + that the client can understand. | + + | `Authorization` | string | The authorization + scheme and credential. | + + | `Content-Length` | integer | The size of the + entity-body, in bytes. | + + | `Content-Type` | string | The format of the + data in the request body. 
| + source: >- + static/openapi/influxdb-influxdb3-core/tags/tags/ref-headers-and-parameters.yaml + staticFilePath: >- + /openapi/influxdb-influxdb3-core/tags/tags/ref-headers-and-parameters.yaml + - path: api/processing-engine + fields: + name: Processing engine + describes: + - /api/v3/configure/processing_engine_trigger + - /api/v3/configure/processing_engine_trigger/disable + - /api/v3/configure/processing_engine_trigger/enable + - /api/v3/configure/plugin_environment/install_packages + - /api/v3/configure/plugin_environment/install_requirements + - /api/v3/plugin_test/wal + - /api/v3/plugin_test/schedule + - /api/v3/engine/{request_path} + - /api/v3/plugins/files + - /api/v3/plugins/directory + title: Processing engine + description: > + Manage Processing engine triggers, test plugins, and send requests to + trigger On Request plugins. + + + InfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded + Python VM that can dynamically load and trigger Python plugins in + response to events in your database. + + Use Processing engine plugins and triggers to run code and perform tasks + for different database events. + + + To get started with the processing engine, see the [Processing engine + and Python plugins](/influxdb3/core/processing-engine/) guide. 
+ tag: Processing engine + isConceptual: false + menuGroup: Processing Engine + operations: + - operationId: PostConfigureProcessingEngineTrigger + method: POST + path: /api/v3/configure/processing_engine_trigger + summary: Create processing engine trigger + tags: + - Processing engine + - operationId: DeleteConfigureProcessingEngineTrigger + method: DELETE + path: /api/v3/configure/processing_engine_trigger + summary: Delete processing engine trigger + tags: + - Processing engine + - operationId: PostDisableProcessingEngineTrigger + method: POST + path: /api/v3/configure/processing_engine_trigger/disable + summary: Disable processing engine trigger + tags: + - Processing engine + - operationId: PostEnableProcessingEngineTrigger + method: POST + path: /api/v3/configure/processing_engine_trigger/enable + summary: Enable processing engine trigger + tags: + - Processing engine + - operationId: PostInstallPluginPackages + method: POST + path: /api/v3/configure/plugin_environment/install_packages + summary: Install plugin packages + tags: + - Processing engine + - operationId: PostInstallPluginRequirements + method: POST + path: /api/v3/configure/plugin_environment/install_requirements + summary: Install plugin requirements + tags: + - Processing engine + - operationId: PostTestWALPlugin + method: POST + path: /api/v3/plugin_test/wal + summary: Test WAL plugin + tags: + - Processing engine + - operationId: PostTestSchedulingPlugin + method: POST + path: /api/v3/plugin_test/schedule + summary: Test scheduling plugin + tags: + - Processing engine + - operationId: GetProcessingEnginePluginRequest + method: GET + path: /api/v3/engine/{request_path} + summary: On Request processing engine plugin request + tags: + - Processing engine + - operationId: PostProcessingEnginePluginRequest + method: POST + path: /api/v3/engine/{request_path} + summary: On Request processing engine plugin request + tags: + - Processing engine + - operationId: PutPluginFile + method: PUT + path: 
/api/v3/plugins/files + summary: Update plugin file + tags: + - Processing engine + - operationId: PutPluginDirectory + method: PUT + path: /api/v3/plugins/directory + summary: Update plugin directory + tags: + - Processing engine + tagDescription: > + Manage Processing engine triggers, test plugins, and send requests to + trigger On Request plugins. + + + InfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded + Python VM that can dynamically load and trigger Python plugins in + response to events in your database. + + Use Processing engine plugins and triggers to run code and perform tasks + for different database events. + + + To get started with the processing engine, see the [Processing engine + and Python plugins](/influxdb3/core/processing-engine/) guide. + source: >- + static/openapi/influxdb-influxdb3-core/tags/tags/ref-processing-engine.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-processing-engine.yaml + - path: api/query-data + fields: + name: Query data + describes: + - /api/v3/query_sql + - /api/v3/query_influxql + - /query + title: Query data + description: Query data using SQL or InfluxQL + tag: Query data + isConceptual: false + menuGroup: Data Operations + operations: + - operationId: GetExecuteQuerySQL + method: GET + path: /api/v3/query_sql + summary: Execute SQL query + tags: + - Query data + - operationId: PostExecuteQuerySQL + method: POST + path: /api/v3/query_sql + summary: Execute SQL query + tags: + - Query data + - operationId: GetExecuteInfluxQLQuery + method: GET + path: /api/v3/query_influxql + summary: Execute InfluxQL query + tags: + - Query data + - operationId: PostExecuteQueryInfluxQL + method: POST + path: /api/v3/query_influxql + summary: Execute InfluxQL query + tags: + - Query data + - operationId: GetV1ExecuteQuery + method: GET + path: /query + summary: Execute InfluxQL query (v1-compatible) + tags: + - Query data + - Compatibility endpoints + - operationId: PostExecuteV1Query + 
method: POST
+ path: /query
+ summary: Execute InfluxQL query (v1-compatible)
+ tags:
+ - Query data
+ - Compatibility endpoints
+ tagDescription: Query data using SQL or InfluxQL
+ source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-query-data.yaml
+ staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-query-data.yaml
+ - path: api/quick-start
+ fields:
+ name: Quick start
+ describes: []
+ title: Quick start
+ description: >
+ 1. [Create an admin token](#section/Authentication) to authorize API
+ requests.
+
+ ```bash
+ curl -X POST "http://localhost:8181/api/v3/configure/token/admin"
+ ```
+ 2. [Check the status](#section/Server-information) of the InfluxDB
+ server.
+
+ ```bash
+ curl "http://localhost:8181/health" \
+ --header "Authorization: Bearer ADMIN_TOKEN"
+ ```
+
+ 3. [Write data](#operation/PostWriteLP) to InfluxDB.
+
+ ```bash
+ curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" \
+ --header "Authorization: Bearer ADMIN_TOKEN" \
+ --data-raw "home,room=Kitchen temp=72.0
+ home,room=Living\ room temp=71.5"
+ ```
+
+ If all data is written, the response is `204 No Content`.
+
+ 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.
+
+ ```bash
+ curl -G "http://localhost:8181/api/v3/query_sql" \
+ --header "Authorization: Bearer ADMIN_TOKEN" \
+ --data-urlencode "db=sensors" \
+ --data-urlencode "q=SELECT * FROM home WHERE room='Living room'" \
+ --data-urlencode "format=jsonl"
+ ```
+
+ Output:
+
+ ```jsonl
+ {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"}
+ ```
+
+ For more information about using InfluxDB 3 Core, see the [Get
+ started](/influxdb3/core/get-started/) guide.
+ tag: Quick start
+ isConceptual: true
+ menuGroup: Concepts
+ operations: []
+ tagDescription: >
+ 1. [Create an admin token](#section/Authentication) to authorize API
+ requests.
+
+ ```bash
+ curl -X POST "http://localhost:8181/api/v3/configure/token/admin"
+ ```
+ 2. 
[Check the status](#section/Server-information) of the InfluxDB
+ server.
+
+ ```bash
+ curl "http://localhost:8181/health" \
+ --header "Authorization: Bearer ADMIN_TOKEN"
+ ```
+
+ 3. [Write data](#operation/PostWriteLP) to InfluxDB.
+
+ ```bash
+ curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" \
+ --header "Authorization: Bearer ADMIN_TOKEN" \
+ --data-raw "home,room=Kitchen temp=72.0
+ home,room=Living\ room temp=71.5"
+ ```
+
+ If all data is written, the response is `204 No Content`.
+
+ 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.
+
+ ```bash
+ curl -G "http://localhost:8181/api/v3/query_sql" \
+ --header "Authorization: Bearer ADMIN_TOKEN" \
+ --data-urlencode "db=sensors" \
+ --data-urlencode "q=SELECT * FROM home WHERE room='Living room'" \
+ --data-urlencode "format=jsonl"
+ ```
+
+ Output:
+
+ ```jsonl
+ {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"}
+ ```
+
+ For more information about using InfluxDB 3 Core, see the [Get
+ started](/influxdb3/core/get-started/) guide.
+ source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-quick-start.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-quick-start.yaml + - path: api/server-information + fields: + name: Server information + describes: + - /health + - /api/v1/health + - /ping + - /metrics + title: Server information + description: Retrieve server metrics, status, and version information + tag: Server information + isConceptual: false + menuGroup: Server + operations: + - operationId: GetHealth + method: GET + path: /health + summary: Health check + tags: + - Server information + - operationId: GetHealthV1 + method: GET + path: /api/v1/health + summary: Health check (v1) + tags: + - Server information + - Compatibility endpoints + - operationId: GetPing + method: GET + path: /ping + summary: Ping the server + tags: + - Server information + - operationId: GetMetrics + method: GET + path: /metrics + summary: Metrics + tags: + - Server information + tagDescription: Retrieve server metrics, status, and version information + source: >- + static/openapi/influxdb-influxdb3-core/tags/tags/ref-server-information.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-server-information.yaml + - path: api/table + fields: + name: Table + describes: + - /api/v3/configure/table + - /api/v3/configure/distinct_cache + - /api/v3/configure/last_cache + title: Table + description: Manage table schemas and data + tag: Table + isConceptual: false + menuGroup: Administration + operations: + - operationId: PostConfigureTable + method: POST + path: /api/v3/configure/table + summary: Create a table + tags: + - Table + - operationId: DeleteConfigureTable + method: DELETE + path: /api/v3/configure/table + summary: Delete a table + tags: + - Table + - operationId: PostConfigureDistinctCache + method: POST + path: /api/v3/configure/distinct_cache + summary: Create distinct cache + tags: + - Cache data + - Table + - operationId: DeleteConfigureDistinctCache + method: 
DELETE + path: /api/v3/configure/distinct_cache + summary: Delete distinct cache + tags: + - Cache data + - Table + - operationId: PostConfigureLastCache + method: POST + path: /api/v3/configure/last_cache + summary: Create last cache + tags: + - Cache data + - Table + - operationId: DeleteConfigureLastCache + method: DELETE + path: /api/v3/configure/last_cache + summary: Delete last cache + tags: + - Cache data + - Table + tagDescription: Manage table schemas and data + source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-table.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-table.yaml + - path: api/token + fields: + name: Token + describes: + - /api/v3/configure/token/admin + - /api/v3/configure/token/admin/regenerate + - /api/v3/configure/token + - /api/v3/configure/token/named_admin + title: Token + description: Manage tokens for authentication and authorization + tag: Token + isConceptual: false + menuGroup: Administration + operations: + - operationId: PostCreateAdminToken + method: POST + path: /api/v3/configure/token/admin + summary: Create admin token + tags: + - Authentication + - Token + - operationId: PostRegenerateAdminToken + method: POST + path: /api/v3/configure/token/admin/regenerate + summary: Regenerate admin token + tags: + - Authentication + - Token + - operationId: DeleteToken + method: DELETE + path: /api/v3/configure/token + summary: Delete token + tags: + - Authentication + - Token + - operationId: PostCreateNamedAdminToken + method: POST + path: /api/v3/configure/token/named_admin + summary: Create named admin token + tags: + - Authentication + - Token + tagDescription: Manage tokens for authentication and authorization + source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-token.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-token.yaml + - path: api/write-data + fields: + name: Write data + describes: + - /write + - /api/v2/write + - /api/v3/write_lp + title: Write data + 
description: > + Write data to InfluxDB 3 using line protocol format. + + + #### Timestamp precision across write APIs + + + InfluxDB 3 provides multiple write endpoints for compatibility with + different InfluxDB versions. + + The following table compares timestamp precision support across v1, v2, + and v3 write APIs: + + + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 + (`/api/v3/write_lp`) | + + |-----------|---------------|----------------------|-------------------------| + + | **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) | + + | **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` | + + | **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` | + + | **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` | + + | **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` | + + | **Minutes** | ✅ `m` | ❌ No | ❌ No | + + | **Hours** | ✅ `h` | ❌ No | ❌ No | + + | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + + + All timestamps are stored internally as nanoseconds. + tag: Write data + isConceptual: false + menuGroup: Data Operations + operations: + - operationId: PostV1Write + method: POST + path: /write + summary: Write line protocol (v1-compatible) + tags: + - Compatibility endpoints + - Write data + - operationId: PostV2Write + method: POST + path: /api/v2/write + summary: Write line protocol (v2-compatible) + tags: + - Compatibility endpoints + - Write data + - operationId: PostWriteLP + method: POST + path: /api/v3/write_lp + summary: Write line protocol + tags: + - Write data + tagDescription: > + Write data to InfluxDB 3 using line protocol format. + + + #### Timestamp precision across write APIs + + + InfluxDB 3 provides multiple write endpoints for compatibility with + different InfluxDB versions. 
+ + The following table compares timestamp precision support across v1, v2, + and v3 write APIs: + + + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 + (`/api/v3/write_lp`) | + + |-----------|---------------|----------------------|-------------------------| + + | **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) | + + | **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` | + + | **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` | + + | **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` | + + | **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` | + + | **Minutes** | ✅ `m` | ❌ No | ❌ No | + + | **Hours** | ✅ `h` | ❌ No | ❌ No | + + | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + + + All timestamps are stored internally as nanoseconds. + source: static/openapi/influxdb-influxdb3-core/tags/tags/ref-write-data.yaml + staticFilePath: /openapi/influxdb-influxdb3-core/tags/tags/ref-write-data.yaml diff --git a/data/article_data/influxdb/influxdb3_enterprise/articles.yml b/data/article_data/influxdb/influxdb3_enterprise/articles.yml new file mode 100644 index 0000000000..1a31bf3533 --- /dev/null +++ b/data/article_data/influxdb/influxdb3_enterprise/articles.yml @@ -0,0 +1,246 @@ +articles: + - path: api/v1/health + fields: + name: /api/v1/health + describes: + - /api/v1/health + menuName: /api/v1/health + title: /api/v1/health + tags: + - api-v1 + - health + apiTags: + - Server information + - Compatibility endpoints + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v1-health.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v1-health.yaml + - path: api/v2/write + fields: + name: /api/v2/write + describes: + - /api/v2/write + menuName: /api/v2/write + title: /api/v2/write + tags: + - api-v2 + - write + apiTags: + - Compatibility endpoints + - Write data + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v2-write.yaml + staticFilePath: 
/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v2-write.yaml + - path: api/v3/configure + fields: + name: /api/v3/configure + describes: + - /api/v3/configure/database + - /api/v3/configure/database/retention_period + - /api/v3/configure/database/{db} + - /api/v3/configure/distinct_cache + - /api/v3/configure/enterprise/token + - /api/v3/configure/last_cache + - /api/v3/configure/plugin_environment/install_packages + - /api/v3/configure/plugin_environment/install_requirements + - /api/v3/configure/processing_engine_trigger + - /api/v3/configure/processing_engine_trigger/disable + - /api/v3/configure/processing_engine_trigger/enable + - /api/v3/configure/table + - /api/v3/configure/token + - /api/v3/configure/token/admin + - /api/v3/configure/token/admin/regenerate + - /api/v3/configure/token/named_admin + menuName: /api/v3/configure + title: /api/v3/configure + tags: + - api-v3 + - configure + apiTags: + - Database + - Cache data + - Table + - Authentication + - Token + - Processing engine + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-configure.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-configure.yaml + - path: api/v3/engine + fields: + name: /api/v3/engine + describes: + - /api/v3/engine/{request_path} + menuName: /api/v3/engine + title: /api/v3/engine + tags: + - api-v3 + - engine + apiTags: + - Processing engine + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-engine.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-engine.yaml + - path: api/v3/plugin_test + fields: + name: /api/v3/plugin_test + describes: + - /api/v3/plugin_test/schedule + - /api/v3/plugin_test/wal + menuName: /api/v3/plugin_test + title: /api/v3/plugin_test + tags: + - api-v3 + - plugin_test + apiTags: + - Processing engine + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-plugin_test.yaml + staticFilePath: 
/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-plugin_test.yaml + - path: api/v3/plugins + fields: + name: /api/v3/plugins + describes: + - /api/v3/plugins/directory + - /api/v3/plugins/files + menuName: /api/v3/plugins + title: /api/v3/plugins + tags: + - api-v3 + - plugins + apiTags: + - Processing engine + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-plugins.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-plugins.yaml + - path: api/v3/query_influxql + fields: + name: /api/v3/query_influxql + describes: + - /api/v3/query_influxql + menuName: /api/v3/query_influxql + title: /api/v3/query_influxql + tags: + - api-v3 + - query_influxql + apiTags: + - Query data + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-query_influxql.yaml + staticFilePath: >- + /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-query_influxql.yaml + - path: api/v3/query_sql + fields: + name: /api/v3/query_sql + describes: + - /api/v3/query_sql + menuName: /api/v3/query_sql + title: /api/v3/query_sql + tags: + - api-v3 + - query_sql + apiTags: + - Query data + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-query_sql.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-query_sql.yaml + - path: api/v3/show + fields: + name: /api/v3/show + describes: + - /api/v3/show/license + menuName: /api/v3/show + title: /api/v3/show + tags: + - api-v3 + - show + apiTags: + - Server information + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-show.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-show.yaml + - path: api/v3/write_lp + fields: + name: /api/v3/write_lp + describes: + - /api/v3/write_lp + menuName: /api/v3/write_lp + title: /api/v3/write_lp + tags: + - api-v3 + - write_lp + apiTags: + - Write data + source: >- + static/openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-write_lp.yaml + 
staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-api-v3-write_lp.yaml + - path: api/health + fields: + name: /health + describes: + - /health + menuName: /health + title: /health + tags: + - '' + - health + apiTags: + - Server information + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-health.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-health.yaml + - path: api/metrics + fields: + name: /metrics + describes: + - /metrics + menuName: /metrics + title: /metrics + tags: + - '' + - metrics + apiTags: + - Server information + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-metrics.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-metrics.yaml + - path: api/ping + fields: + name: /ping + describes: + - /ping + menuName: /ping + title: /ping + tags: + - '' + - ping + apiTags: + - Server information + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-ping.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-ping.yaml + - path: api/query + fields: + name: /query + describes: + - /query + menuName: /query + title: /query + tags: + - '' + - query + apiTags: + - Query data + - Compatibility endpoints + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-query.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-query.yaml + - path: api/write + fields: + name: /write + describes: + - /write + menuName: /write + title: /write + tags: + - '' + - write + apiTags: + - Compatibility endpoints + - Write data + source: static/openapi/influxdb-influxdb3-enterprise/paths/ref-write.yaml + staticFilePath: /openapi/influxdb-influxdb3-enterprise/paths/ref-write.yaml diff --git a/data/article_data/influxdb/oss-v2/articles.yml b/data/article_data/influxdb/oss-v2/articles.yml new file mode 100644 index 0000000000..aa37b5a614 --- /dev/null +++ b/data/article_data/influxdb/oss-v2/articles.yml @@ -0,0 +1,757 @@ +articles: + - path: 
api/v2/authorizations + fields: + name: /api/v2/authorizations + describes: + - /api/v2/authorizations + - /api/v2/authorizations/{authID} + title: |- + /api/v2/authorizations + InfluxDB OSS API Service + tags: + - api-v2 + - authorizations + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-authorizations.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-authorizations.yaml + - path: api/v2/backup + fields: + name: /api/v2/backup + describes: + - /api/v2/backup/kv + - /api/v2/backup/metadata + - /api/v2/backup/shards/{shardID} + title: |- + /api/v2/backup + InfluxDB OSS API Service + tags: + - api-v2 + - backup + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-backup.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-backup.yaml + - path: api/v2/buckets + fields: + name: /api/v2/buckets + describes: + - /api/v2/buckets + - /api/v2/buckets/{bucketID} + - /api/v2/buckets/{bucketID}/labels + - /api/v2/buckets/{bucketID}/labels/{labelID} + - /api/v2/buckets/{bucketID}/members + - /api/v2/buckets/{bucketID}/members/{userID} + - /api/v2/buckets/{bucketID}/owners + - /api/v2/buckets/{bucketID}/owners/{userID} + title: |- + /api/v2/buckets + InfluxDB OSS API Service + tags: + - api-v2 + - buckets + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-buckets.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-buckets.yaml + - path: api/v2/checks + fields: + name: /api/v2/checks + describes: + - /api/v2/checks + - /api/v2/checks/{checkID} + - /api/v2/checks/{checkID}/labels + - /api/v2/checks/{checkID}/labels/{labelID} + - /api/v2/checks/{checkID}/query + title: |- + /api/v2/checks + InfluxDB OSS API Service + tags: + - api-v2 + - checks + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-checks.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-checks.yaml + - path: api/v2/config + fields: + name: /api/v2/config + describes: + - /api/v2/config + title: |- + /api/v2/config + InfluxDB OSS API 
Service + tags: + - api-v2 + - config + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-config.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-config.yaml + - path: api/v2/dashboards + fields: + name: /api/v2/dashboards + describes: + - /api/v2/dashboards + - /api/v2/dashboards/{dashboardID} + - /api/v2/dashboards/{dashboardID}/cells + - /api/v2/dashboards/{dashboardID}/cells/{cellID} + - /api/v2/dashboards/{dashboardID}/cells/{cellID}/view + - /api/v2/dashboards/{dashboardID}/labels + - /api/v2/dashboards/{dashboardID}/labels/{labelID} + - /api/v2/dashboards/{dashboardID}/members + - /api/v2/dashboards/{dashboardID}/members/{userID} + - /api/v2/dashboards/{dashboardID}/owners + - /api/v2/dashboards/{dashboardID}/owners/{userID} + title: |- + /api/v2/dashboards + InfluxDB OSS API Service + tags: + - api-v2 + - dashboards + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-dashboards.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-dashboards.yaml + - path: api/v2/dbrps + fields: + name: /api/v2/dbrps + describes: + - /api/v2/dbrps + - /api/v2/dbrps/{dbrpID} + title: |- + /api/v2/dbrps + InfluxDB OSS API Service + tags: + - api-v2 + - dbrps + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-dbrps.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-dbrps.yaml + - path: api/v2/delete + fields: + name: /api/v2/delete + describes: + - /api/v2/delete + title: |- + /api/v2/delete + InfluxDB OSS API Service + tags: + - api-v2 + - delete + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-delete.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-delete.yaml + - path: api/v2/flags + fields: + name: /api/v2/flags + describes: + - /api/v2/flags + title: |- + /api/v2/flags + InfluxDB OSS API Service + tags: + - api-v2 + - flags + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-flags.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-flags.yaml + - path: api/v2/labels + 
fields: + name: /api/v2/labels + describes: + - /api/v2/labels + - /api/v2/labels/{labelID} + title: |- + /api/v2/labels + InfluxDB OSS API Service + tags: + - api-v2 + - labels + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-labels.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-labels.yaml + - path: api/v2/maps + fields: + name: /api/v2/maps + describes: + - /api/v2/maps/mapToken + title: |- + /api/v2/maps + InfluxDB OSS API Service + tags: + - api-v2 + - maps + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-maps.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-maps.yaml + - path: api/v2/me + fields: + name: /api/v2/me + describes: + - /api/v2/me + - /api/v2/me/password + title: |- + /api/v2/me + InfluxDB OSS API Service + tags: + - api-v2 + - me + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-me.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-me.yaml + - path: api/v2/notificationEndpoints + fields: + name: /api/v2/notificationEndpoints + describes: + - /api/v2/notificationEndpoints + - /api/v2/notificationEndpoints/{endpointID} + - /api/v2/notificationEndpoints/{endpointID}/labels + - /api/v2/notificationEndpoints/{endpointID}/labels/{labelID} + title: |- + /api/v2/notificationEndpoints + InfluxDB OSS API Service + tags: + - api-v2 + - notificationEndpoints + source: >- + static/openapi/influxdb-oss-v2/paths/ref-api-v2-notificationEndpoints.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-notificationEndpoints.yaml + - path: api/v2/notificationRules + fields: + name: /api/v2/notificationRules + describes: + - /api/v2/notificationRules + - /api/v2/notificationRules/{ruleID} + - /api/v2/notificationRules/{ruleID}/labels + - /api/v2/notificationRules/{ruleID}/labels/{labelID} + - /api/v2/notificationRules/{ruleID}/query + title: |- + /api/v2/notificationRules + InfluxDB OSS API Service + tags: + - api-v2 + - notificationRules + source: 
static/openapi/influxdb-oss-v2/paths/ref-api-v2-notificationRules.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-notificationRules.yaml + - path: api/v2/orgs + fields: + name: /api/v2/orgs + describes: + - /api/v2/orgs + - /api/v2/orgs/{orgID} + - /api/v2/orgs/{orgID}/members + - /api/v2/orgs/{orgID}/members/{userID} + - /api/v2/orgs/{orgID}/owners + - /api/v2/orgs/{orgID}/owners/{userID} + - /api/v2/orgs/{orgID}/secrets + - /api/v2/orgs/{orgID}/secrets/delete + - /api/v2/orgs/{orgID}/secrets/{secretID} + title: |- + /api/v2/orgs + InfluxDB OSS API Service + tags: + - api-v2 + - orgs + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-orgs.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-orgs.yaml + - path: api/v2/query + fields: + name: /api/v2/query + describes: + - /api/v2/query + - /api/v2/query/analyze + - /api/v2/query/ast + - /api/v2/query/suggestions + - /api/v2/query/suggestions/{name} + title: |- + /api/v2/query + InfluxDB OSS API Service + tags: + - api-v2 + - query + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-query.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-query.yaml + - path: api/v2/remotes + fields: + name: /api/v2/remotes + describes: + - /api/v2/remotes + - /api/v2/remotes/{remoteID} + title: |- + /api/v2/remotes + InfluxDB OSS API Service + tags: + - api-v2 + - remotes + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-remotes.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-remotes.yaml + - path: api/v2/replications + fields: + name: /api/v2/replications + describes: + - /api/v2/replications + - /api/v2/replications/{replicationID} + - /api/v2/replications/{replicationID}/validate + title: |- + /api/v2/replications + InfluxDB OSS API Service + tags: + - api-v2 + - replications + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-replications.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-replications.yaml + - path: 
api/v2/resources + fields: + name: /api/v2/resources + describes: + - /api/v2/resources + title: |- + /api/v2/resources + InfluxDB OSS API Service + tags: + - api-v2 + - resources + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-resources.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-resources.yaml + - path: api/v2/restore + fields: + name: /api/v2/restore + describes: + - /api/v2/restore/bucket/{bucketID} + - /api/v2/restore/bucketMetadata + - /api/v2/restore/kv + - /api/v2/restore/shards/{shardID} + - /api/v2/restore/sql + title: |- + /api/v2/restore + InfluxDB OSS API Service + tags: + - api-v2 + - restore + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-restore.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-restore.yaml + - path: api/v2/scrapers + fields: + name: /api/v2/scrapers + describes: + - /api/v2/scrapers + - /api/v2/scrapers/{scraperTargetID} + - /api/v2/scrapers/{scraperTargetID}/labels + - /api/v2/scrapers/{scraperTargetID}/labels/{labelID} + - /api/v2/scrapers/{scraperTargetID}/members + - /api/v2/scrapers/{scraperTargetID}/members/{userID} + - /api/v2/scrapers/{scraperTargetID}/owners + - /api/v2/scrapers/{scraperTargetID}/owners/{userID} + title: |- + /api/v2/scrapers + InfluxDB OSS API Service + tags: + - api-v2 + - scrapers + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-scrapers.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-scrapers.yaml + - path: api/v2/setup + fields: + name: /api/v2/setup + describes: + - /api/v2/setup + title: |- + /api/v2/setup + InfluxDB OSS API Service + tags: + - api-v2 + - setup + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-setup.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-setup.yaml + - path: api/v2/signin + fields: + name: /api/v2/signin + describes: + - /api/v2/signin + title: |- + /api/v2/signin + InfluxDB OSS API Service + tags: + - api-v2 + - signin + source: 
static/openapi/influxdb-oss-v2/paths/ref-api-v2-signin.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-signin.yaml + - path: api/v2/signout + fields: + name: /api/v2/signout + describes: + - /api/v2/signout + title: |- + /api/v2/signout + InfluxDB OSS API Service + tags: + - api-v2 + - signout + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-signout.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-signout.yaml + - path: api/v2/sources + fields: + name: /api/v2/sources + describes: + - /api/v2/sources + - /api/v2/sources/{sourceID} + - /api/v2/sources/{sourceID}/buckets + - /api/v2/sources/{sourceID}/health + title: |- + /api/v2/sources + InfluxDB OSS API Service + tags: + - api-v2 + - sources + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-sources.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-sources.yaml + - path: api/v2/stacks + fields: + name: /api/v2/stacks + describes: + - /api/v2/stacks + - /api/v2/stacks/{stack_id} + - /api/v2/stacks/{stack_id}/uninstall + title: |- + /api/v2/stacks + InfluxDB OSS API Service + tags: + - api-v2 + - stacks + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-stacks.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-stacks.yaml + - path: api/v2/tasks + fields: + name: /api/v2/tasks + describes: + - /api/v2/tasks + - /api/v2/tasks/{taskID} + - /api/v2/tasks/{taskID}/labels + - /api/v2/tasks/{taskID}/labels/{labelID} + - /api/v2/tasks/{taskID}/logs + - /api/v2/tasks/{taskID}/members + - /api/v2/tasks/{taskID}/members/{userID} + - /api/v2/tasks/{taskID}/owners + - /api/v2/tasks/{taskID}/owners/{userID} + - /api/v2/tasks/{taskID}/runs + - /api/v2/tasks/{taskID}/runs/{runID} + - /api/v2/tasks/{taskID}/runs/{runID}/logs + - /api/v2/tasks/{taskID}/runs/{runID}/retry + title: |- + /api/v2/tasks + InfluxDB OSS API Service + tags: + - api-v2 + - tasks + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-tasks.yaml + staticFilePath: 
/openapi/influxdb-oss-v2/paths/ref-api-v2-tasks.yaml + - path: api/v2/telegraf + fields: + name: /api/v2/telegraf + describes: + - /api/v2/telegraf/plugins + title: |- + /api/v2/telegraf + InfluxDB OSS API Service + tags: + - api-v2 + - telegraf + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-telegraf.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-telegraf.yaml + - path: api/v2/telegrafs + fields: + name: /api/v2/telegrafs + describes: + - /api/v2/telegrafs + - /api/v2/telegrafs/{telegrafID} + - /api/v2/telegrafs/{telegrafID}/labels + - /api/v2/telegrafs/{telegrafID}/labels/{labelID} + - /api/v2/telegrafs/{telegrafID}/members + - /api/v2/telegrafs/{telegrafID}/members/{userID} + - /api/v2/telegrafs/{telegrafID}/owners + - /api/v2/telegrafs/{telegrafID}/owners/{userID} + title: |- + /api/v2/telegrafs + InfluxDB OSS API Service + tags: + - api-v2 + - telegrafs + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-telegrafs.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-telegrafs.yaml + - path: api/v2/templates + fields: + name: /api/v2/templates + describes: + - /api/v2/templates/apply + - /api/v2/templates/export + title: |- + /api/v2/templates + InfluxDB OSS API Service + tags: + - api-v2 + - templates + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-templates.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-templates.yaml + - path: api/v2/users + fields: + name: /api/v2/users + describes: + - /api/v2/users + - /api/v2/users/{userID} + - /api/v2/users/{userID}/password + title: |- + /api/v2/users + InfluxDB OSS API Service + tags: + - api-v2 + - users + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-users.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-users.yaml + - path: api/v2/variables + fields: + name: /api/v2/variables + describes: + - /api/v2/variables + - /api/v2/variables/{variableID} + - /api/v2/variables/{variableID}/labels + - 
/api/v2/variables/{variableID}/labels/{labelID} + title: |- + /api/v2/variables + InfluxDB OSS API Service + tags: + - api-v2 + - variables + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-variables.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-variables.yaml + - path: api/v2/write + fields: + name: /api/v2/write + describes: + - /api/v2/write + title: |- + /api/v2/write + InfluxDB OSS API Service + tags: + - api-v2 + - write + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2-write.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2-write.yaml + - path: api/v2 + fields: + name: /api/v2 + describes: + - /api/v2 + title: |- + /api/v2 + InfluxDB OSS API Service + tags: + - api + - v2 + source: static/openapi/influxdb-oss-v2/paths/ref-api-v2.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-api-v2.yaml + - path: debug/pprof/all + fields: + name: /debug/pprof/all + describes: + - /debug/pprof/all + title: |- + /debug/pprof/all + InfluxDB OSS API Service + tags: + - debug-pprof + - all + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-all.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-all.yaml + - path: debug/pprof/allocs + fields: + name: /debug/pprof/allocs + describes: + - /debug/pprof/allocs + title: |- + /debug/pprof/allocs + InfluxDB OSS API Service + tags: + - debug-pprof + - allocs + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-allocs.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-allocs.yaml + - path: debug/pprof/block + fields: + name: /debug/pprof/block + describes: + - /debug/pprof/block + title: |- + /debug/pprof/block + InfluxDB OSS API Service + tags: + - debug-pprof + - block + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-block.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-block.yaml + - path: debug/pprof/cmdline + fields: + name: /debug/pprof/cmdline + describes: + - 
/debug/pprof/cmdline + title: |- + /debug/pprof/cmdline + InfluxDB OSS API Service + tags: + - debug-pprof + - cmdline + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-cmdline.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-cmdline.yaml + - path: debug/pprof/goroutine + fields: + name: /debug/pprof/goroutine + describes: + - /debug/pprof/goroutine + title: |- + /debug/pprof/goroutine + InfluxDB OSS API Service + tags: + - debug-pprof + - goroutine + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-goroutine.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-goroutine.yaml + - path: debug/pprof/heap + fields: + name: /debug/pprof/heap + describes: + - /debug/pprof/heap + title: |- + /debug/pprof/heap + InfluxDB OSS API Service + tags: + - debug-pprof + - heap + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-heap.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-heap.yaml + - path: debug/pprof/mutex + fields: + name: /debug/pprof/mutex + describes: + - /debug/pprof/mutex + title: |- + /debug/pprof/mutex + InfluxDB OSS API Service + tags: + - debug-pprof + - mutex + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-mutex.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-mutex.yaml + - path: debug/pprof/profile + fields: + name: /debug/pprof/profile + describes: + - /debug/pprof/profile + title: |- + /debug/pprof/profile + InfluxDB OSS API Service + tags: + - debug-pprof + - profile + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-profile.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-profile.yaml + - path: debug/pprof/threadcreate + fields: + name: /debug/pprof/threadcreate + describes: + - /debug/pprof/threadcreate + title: |- + /debug/pprof/threadcreate + InfluxDB OSS API Service + tags: + - debug-pprof + - threadcreate + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-threadcreate.yaml 
+ staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-threadcreate.yaml + - path: debug/pprof/trace + fields: + name: /debug/pprof/trace + describes: + - /debug/pprof/trace + title: |- + /debug/pprof/trace + InfluxDB OSS API Service + tags: + - debug-pprof + - trace + source: static/openapi/influxdb-oss-v2/paths/ref-debug-pprof-trace.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-debug-pprof-trace.yaml + - path: health + fields: + name: /health + describes: + - /health + title: |- + /health + InfluxDB OSS API Service + tags: + - '' + - health + source: static/openapi/influxdb-oss-v2/paths/ref-health.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-health.yaml + - path: legacy/authorizations + fields: + name: /legacy/authorizations + describes: + - /legacy/authorizations + - /legacy/authorizations/{authID} + - /legacy/authorizations/{authID}/password + title: |- + /legacy/authorizations + InfluxDB OSS API Service + tags: + - legacy + - authorizations + source: static/openapi/influxdb-oss-v2/paths/ref-legacy-authorizations.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-legacy-authorizations.yaml + - path: metrics + fields: + name: /metrics + describes: + - /metrics + title: |- + /metrics + InfluxDB OSS API Service + tags: + - '' + - metrics + source: static/openapi/influxdb-oss-v2/paths/ref-metrics.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-metrics.yaml + - path: ping + fields: + name: /ping + describes: + - /ping + title: |- + /ping + InfluxDB OSS API Service + tags: + - '' + - ping + source: static/openapi/influxdb-oss-v2/paths/ref-ping.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-ping.yaml + - path: query + fields: + name: /query + describes: + - /query + title: |- + /query + InfluxDB OSS API Service + tags: + - '' + - query + source: static/openapi/influxdb-oss-v2/paths/ref-query.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-query.yaml + - path: ready + fields: + name: /ready + 
describes: + - /ready + title: |- + /ready + InfluxDB OSS API Service + tags: + - '' + - ready + source: static/openapi/influxdb-oss-v2/paths/ref-ready.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-ready.yaml + - path: write + fields: + name: /write + describes: + - /write + title: |- + /write + InfluxDB OSS API Service + tags: + - '' + - write + source: static/openapi/influxdb-oss-v2/paths/ref-write.yaml + staticFilePath: /openapi/influxdb-oss-v2/paths/ref-write.yaml diff --git a/layouts/partials/api/sidebar-nav.html b/layouts/partials/api/sidebar-nav.html new file mode 100644 index 0000000000..0acb89d756 --- /dev/null +++ b/layouts/partials/api/sidebar-nav.html @@ -0,0 +1,244 @@ +{{/* + API Reference Navigation for Sidebar + + Displays a collapsible navigation tree for API endpoints, + organized by functional groups defined in data/api_nav_groups.yml. + + Uses Hugo data from data/article-data/ to build navigation. + Supports both tag-based (new) and path-based (legacy) article data. +*/}} + +{{/* Get product path data for data lookup */}} +{{ $productPathData := findRE "[^/]+.*?" .RelPermalink }} +{{ $product := index $productPathData 0 }} +{{ $version := index $productPathData 1 }} + +{{/* Build data key for article data lookup */}} +{{/* Hugo converts hyphens to underscores in data file keys */}} +{{ $dataKey := "" }} +{{ if eq $product "influxdb3" }} + {{ $dataKey = print "influxdb3_" $version }} +{{ else if eq $product "influxdb" }} + {{ $dataKey = print $version }} +{{ else }} + {{ $dataKey = $product }} +{{ end }} + +{{/* Get article data for this product */}} +{{/* + Hugo data path: data/article_data/influxdb/influxdb3_core/articles.yml + Access: .Site.Data.article_data.influxdb.influxdb3_core.articles.articles + + The double "articles" is because: + - First "articles" is the filename (articles.yml) + - Second "articles" is the key inside the YAML file +*/}} +{{ $articles := slice }} +{{ with .Site.Data.article_data }} + {{ with index . 
"influxdb" }} + {{ with index . $dataKey }} + {{ with index . "articles" }} + {{ with index . "articles" }} + {{ $articles = . }} + {{ end }} + {{ end }} + {{ end }} + {{ end }} +{{ end }} + +{{/* Get navigation groups configuration */}} +{{ $navGroups := .Site.Data.api_nav_groups.groups }} + +{{/* Check if articles use tag-based structure */}} +{{ $isTagBased := false }} +{{ if gt (len $articles) 0 }} + {{ $firstArticle := index $articles 0 }} + {{ if reflect.IsMap $firstArticle }} + {{ with index $firstArticle "fields" }} + {{ if reflect.IsMap . }} + {{ if isset . "tag" }} + {{ $isTagBased = true }} + {{ end }} + {{ end }} + {{ end }} + {{ end }} +{{ end }} + +{{ if and (gt (len $articles) 0) $navGroups $isTagBased }} +{{/* Tag-based navigation */}} + +{{ else if gt (len $articles) 0 }} +{{/* Legacy path-based navigation (fallback) */}} + +{{ end }} From c3a418feabd8e4af01c54ce925d0cddac3fd8f2e Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:07:55 -0600 Subject: [PATCH 11/51] feat(api): Update config, styles, and content for API docs uplift - Update hugo.yml config for API docs settings - Simplify _api-overrides.scss (removed hardcoded content styles) - Import _api-layout.scss in styles-default.scss - Update API landing pages for Core and Enterprise with redirects - Update OpenAPI spec files - Update dependencies --- .gitignore | 1 + api-docs/influxdb3/core/v3/ref.yml | 13 +- api-docs/influxdb3/enterprise/v3/ref.yml | 13 +- assets/styles/layouts/_api-overrides.scss | 271 ++---------------- assets/styles/styles-default.scss | 3 +- config/_default/hugo.yml | 2 +- .../influxdb3/core/reference/api/_index.md | 22 +- .../enterprise/reference/api/_index.md | 20 +- package.json | 2 +- yarn.lock | 2 +- 10 files changed, 51 insertions(+), 298 deletions(-) diff --git a/.gitignore b/.gitignore index caeee60bd5..8688142320 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ package-lock.json # Content generation 
/content/influxdb*/**/api/**/*.html +/content/influxdb*/**/api/**/*.md !api-docs/**/.config.yml /api-docs/redoc-static.html* diff --git a/api-docs/influxdb3/core/v3/ref.yml b/api-docs/influxdb3/core/v3/ref.yml index 5b00fa7ae5..c3482c416e 100644 --- a/api-docs/influxdb3/core/v3/ref.yml +++ b/api-docs/influxdb3/core/v3/ref.yml @@ -49,12 +49,13 @@ tags: Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: | Authentication scheme | Works with | - |:-------------------|:-----------| - | [Bearer authentication](#section/Authentication/BearerAuthentication) | All endpoints | - | [Token authentication](#section/Authentication/TokenAuthentication) | v1, v2 endpoints | - | [Basic authentication](#section/Authentication/BasicAuthentication) | v1 endpoints | - | [Querystring authentication](#section/Authentication/QuerystringAuthentication) | v1 endpoints | - + |:----------------------|:-----------| + | Bearer authentication | All endpoints | + | Token authentication | v1, v2 endpoints | + | Basic authentication | v1 endpoints | + | Querystring authentication | v1 endpoints | + + See the **Security Schemes** section below for details on each authentication method. 
x-traitTag: true - name: Cache data description: | diff --git a/api-docs/influxdb3/enterprise/v3/ref.yml b/api-docs/influxdb3/enterprise/v3/ref.yml index a7ab0c8f57..2c4d8cfff1 100644 --- a/api-docs/influxdb3/enterprise/v3/ref.yml +++ b/api-docs/influxdb3/enterprise/v3/ref.yml @@ -49,12 +49,13 @@ tags: Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: | Authentication scheme | Works with | - |:-------------------|:-----------| - | [Bearer authentication](#section/Authentication/BearerAuthentication) | All endpoints | - | [Token authentication](#section/Authentication/TokenAuthentication) | v1, v2 endpoints | - | [Basic authentication](#section/Authentication/BasicAuthentication) | v1 endpoints | - | [Querystring authentication](#section/Authentication/QuerystringAuthentication) | v1 endpoints | - + |:----------------------|:-----------| + | Bearer authentication | All endpoints | + | Token authentication | v1, v2 endpoints | + | Basic authentication | v1 endpoints | + | Querystring authentication | v1 endpoints | + + See the **Security Schemes** section below for details on each authentication method. x-traitTag: true - name: Cache data description: | diff --git a/assets/styles/layouts/_api-overrides.scss b/assets/styles/layouts/_api-overrides.scss index bc220588f8..f108911e24 100644 --- a/assets/styles/layouts/_api-overrides.scss +++ b/assets/styles/layouts/_api-overrides.scss @@ -1,9 +1,17 @@ +//////////////////////////////////////////////////////////////////////////////// +// API Documentation Style Overrides +// +// Provides loading spinner and reusable API-related styles. +// Note: Legacy Redoc-specific overrides have been removed in favor of +// Scalar/RapiDoc renderers which use CSS custom properties for theming. 
+//////////////////////////////////////////////////////////////////////////////// + @import "tools/color-palette"; @import "tools/fonts"; // Fonts $proxima: 'Proxima Nova', sans-serif; -$code: 'IBM Plex Mono', monospace;; +$code: 'IBM Plex Mono', monospace; // Font weights $medium: 500; @@ -22,7 +30,7 @@ $bold: 700; } @keyframes spinner { - to {transform: rotate(360deg);} + to { transform: rotate(360deg); } } .spinner:before { @@ -41,256 +49,15 @@ $bold: 700; animation: spinner .6s linear infinite; } -//////////////////////////////// InfluxDB Header /////////////////////////////// - -#influx-header { - font-family: $proxima; - padding: 10px ; - display: flex; - align-items: center; - justify-content: space-between; - background-color: $g2-kevlar; - a { - text-decoration: none; - &.back { - color: $g20-white; - transition: color .2s; - &:hover { - color: $b-pool; - } - &:before { - content: "\e919"; - font-family: 'icomoon-v2'; - margin-right: .65rem; - } - } - &.btn { - padding: .5rem .75rem .5rem .65rem; - font-size: .85rem; - font-weight: 500; - color: $g15-platinum; - background: $g5-pepper; - border-radius: 4.5px; - transition: all .2s; - &:before { - content: "\e934"; - display: inline-block; - font-size: .95rem; - margin-right: .5rem; - font-family: 'icomoon-v2'; - } - &:hover { - color: $g20-white; - background: $b-pool; - } - } - } -} - -// Header Media Queries - -@media (max-width: 600px) { - #influx-header span.version {display: none;} -} - +//////////////////////////////////////////////////////////////////////////////// +/////////////////////////// HTTP Method Badge Colors /////////////////////////// //////////////////////////////////////////////////////////////////////////////// -.cjtbAK { - h1,h2,h3,h4,h5,h6, - p,li,th,td { - font-family: $proxima !important; - } -} - -#redoc { - h1,h2,h3 { - font-weight: $medium !important; - } -} - -// Section title padding -.dluJDj { - padding: 20px 0; -} - -// Page h1 -.dTJWQH { - color: $g7-graphite; - font-size: 
2rem; -} - -// Download button -.jIdpVJ { - background: $b-dodger; - color: $g20-white; - border: none; - border-radius: 3px; - font-family: $proxima; - font-size: .85rem; - font-weight: $medium; - transition: background-color .2s; - &:hover { - background-color: $b-pool; - } -} - -// Tag h1s -.WxWXp { - color: $g7-graphite; - font-size: 1.75rem; -} - -// Summaru h2s and table headers -.ioYTqA, .bxcHYI, .hoUoen { - color: $g7-graphite; -} - -// h3s -.espozG { - color: $g8-storm; -} - -// Links -.bnFPhO a { color: $b-dodger; - &:visited {color: $b-dodger;} -} - -.redoc-json { - font-family: $code !important; -} - -// Inline Code -.flfxUM code, -.gDsWLk code, -.kTVySD { - font-family: $code !important; - color: $cp-marguerite; - background: $cp-titan; - border-color: $cp-titan; -} - -// Required tags -.jsTAxL { - color: $r-curacao; -} - -///////////////////////////// RESPONSE COLOR BLOCKS //////////////////////////// - -// Green -.hLVzSF, .fDvFMp { - background-color: rgba($gr-honeydew, .2); - color: $gr-emerald; -} - -// Red -.byLrBg { - background-color: rgba($r-curacao, .1); - color: $r-curacao; -} - - - -/////////////////////////////////// LEFT NAV /////////////////////////////////// - -// Left nav background -.gZdDsM { - background-color: $g19-ghost; -} - -.gpbcFk:hover, .sc-eTuwsz.active { - background-color: $g17-whisper; -} - -// List item text -.SmuWE, .gcUzvG, .bbViyS, .sc-hrWEMg label { - font-family: $proxima !important; -} - -.fyUykq { - font-weight: $medium; -} - -// Request method tags -.cFwMcp { - &.post { background-color: $b-ocean; } - &.get { background-color: $gr-rainforest; } - &.put { background-color: $br-galaxy; } - &.patch { background-color: $y-thunder; color: rgba($g5-pepper, .75);} - &.delete { background-color: $r-curacao; } -} - -// Active nav section -.gcUzvG, .iNzLCk:hover { - color: $br-magenta; -} - -/////////////////////////////// RIGHT CODE COLUMN ////////////////////////////// - -// Right column backgrounds -.dtUibw, .fLUKgj { - 
background-color: $g2-kevlar; - h3,h4,h5,h6 { - font-family: $proxima !important; - font-weight: $medium !important; - } -} - -// Code backgrounds -.irpqyy > .react-tabs__tab-panel { - background-color: $g0-obsidian; -} -.dHLKeu, .fVaxnA { - padding-left: 10px; - background-color: $g0-obsidian; -} - -// Response code tabs -.irpqyy > ul > li { - background-color: $g0-obsidian; - border-radius: 3px; - &.react-tabs__tab--selected{ color: $br-pulsar;} - &.tab-error { color: $r-fire; } - &.tab-success { color: $gr-viridian; } -} - -// Request methods -.bNYCAJ, -.jBjYbV, -.hOczRB, -.fRsrDc, -.hPskZd { - font-family: $proxima; - font-weight: $medium; - letter-spacing: .04em; - border-radius: 3px; -} -.bNYCAJ { background-color: $b-ocean; } /* Post */ -.jBjYbV { background-color: $gr-viridian; } /* Get */ -.hOczRB { background-color: $br-galaxy; } /* Put */ -.fRsrDc { background-color: $y-thunder; color: $g5-pepper; } /* Patch */ -.hPskZd { background-color: $r-curacao; } /* Delete */ - -// Content type block -.gzAoUb { - background-color: $g2-kevlar; - font-family: $proxima; -} -.iENVAs { font-family: $code; } -.dpMbau { font-family: $proxima; } - -// Code controls -.fCJmC { - font-family: $proxima; - span { border-radius: 3px; } -} - -// Code blocks -.kZHJcC { font-family: $code; } -.jCgylq { - .token.string { - color: $gr-honeydew; - & + a { color: $b-pool; } - } - .token.boolean { color: #f955b0; } -} +// Reusable method badge colors (used by _api-layout.scss .method-badge) +// These follow standard REST API color conventions +$method-get: $gr-rainforest; +$method-post: $b-ocean; +$method-put: $br-galaxy; +$method-patch: $y-thunder; +$method-delete: $r-curacao; diff --git a/assets/styles/styles-default.scss b/assets/styles/styles-default.scss index 8852a240c3..94c897f291 100644 --- a/assets/styles/styles-default.scss +++ b/assets/styles/styles-default.scss @@ -32,7 +32,8 @@ "layouts/v1-overrides", "layouts/notifications", "layouts/code-controls", - 
"layouts/v3-wayfinding"; + "layouts/v3-wayfinding", + "layouts/api-layout"; // Import Components @import "components/influxdb-version-detector", diff --git a/config/_default/hugo.yml b/config/_default/hugo.yml index 08859dea60..12b0f0ad17 100644 --- a/config/_default/hugo.yml +++ b/config/_default/hugo.yml @@ -99,7 +99,7 @@ params: env: development environment: development # API documentation renderer: "scalar" (default) or "rapidoc" - apiRenderer: scalar + apiRenderer: rapidoc # Configure the server for development server: diff --git a/content/influxdb3/core/reference/api/_index.md b/content/influxdb3/core/reference/api/_index.md index 6a2200b1e5..13724ba97f 100644 --- a/content/influxdb3/core/reference/api/_index.md +++ b/content/influxdb3/core/reference/api/_index.md @@ -1,20 +1,12 @@ --- title: InfluxDB HTTP API description: > - The InfluxDB HTTP API for {{% product-name %}} provides a programmatic interface - for interactions with InfluxDB, - including writing, querying, and processing data, and managing an InfluxDB 3 - instance. -menu: - influxdb3_core: - parent: Reference - name: InfluxDB HTTP API -weight: 104 -influxdb3/core/tags: [api] -source: /shared/influxdb3-api-reference/_index.md + The InfluxDB HTTP API for InfluxDB 3 Core provides a programmatic interface + for interactions with InfluxDB. +# Redirect to the new location +aliases: + - /influxdb3/core/reference/api/ +redirect: /influxdb3/core/api/ --- - +This page has moved to [InfluxDB HTTP API](/influxdb3/core/api/). 
diff --git a/content/influxdb3/enterprise/reference/api/_index.md b/content/influxdb3/enterprise/reference/api/_index.md index ea78867f6d..a5a831de48 100644 --- a/content/influxdb3/enterprise/reference/api/_index.md +++ b/content/influxdb3/enterprise/reference/api/_index.md @@ -1,20 +1,10 @@ --- title: InfluxDB HTTP API description: > - The InfluxDB HTTP API for {{% product-name %}} provides a programmatic interface - for interactions with InfluxDB, - including writing, querying, and processing data, and managing an InfluxDB 3 - instance. -menu: - influxdb3_enterprise: - parent: Reference - name: InfluxDB HTTP API -weight: 104 -influxdb3/enterprise/tags: [api] -source: /shared/influxdb3-api-reference/_index.md + The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface + for interactions with InfluxDB. +# Redirect to the new location +redirect: /influxdb3/enterprise/api/ --- - +This page has moved to [InfluxDB HTTP API](/influxdb3/enterprise/api/). diff --git a/package.json b/package.json index 5ee36aa39f..92a162df9a 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "remark-frontmatter": "^5.0.0", "remark-gfm": "^4.0.1", "remark-parse": "^11.0.0", - "typescript": "^5.8.3", + "typescript": "^5.9.3", "typescript-eslint": "^8.32.1", "unified": "^11.0.5", "winston": "^3.16.0" diff --git a/yarn.lock b/yarn.lock index 9761223ff4..ae7323bb1a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6033,7 +6033,7 @@ typescript-eslint@^8.32.1: "@typescript-eslint/typescript-estree" "8.47.0" "@typescript-eslint/utils" "8.47.0" -typescript@^5.8.3: +typescript@^5.9.3: version "5.9.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.3.tgz#5b4f59e15310ab17a216f5d6cf53ee476ede670f" integrity sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw== From 2ab92745759c0917455decb41ed359dee594f0d8 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:08:09 -0600 Subject: [PATCH 12/51] 
test(api): Update Cypress tests for API reference pages - Update tests for new tag-based API page structure - Add tests for operations list rendering - Add tests for sidebar navigation groups --- cypress/e2e/content/api-reference.cy.js | 156 +++++++++++++++++++++++- 1 file changed, 155 insertions(+), 1 deletion(-) diff --git a/cypress/e2e/content/api-reference.cy.js b/cypress/e2e/content/api-reference.cy.js index ceeaffeffc..e2798563c4 100644 --- a/cypress/e2e/content/api-reference.cy.js +++ b/cypress/e2e/content/api-reference.cy.js @@ -1,4 +1,16 @@ /// + +/** + * API Reference Documentation E2E Tests + * + * Tests both: + * 1. Legacy API reference pages (link validation, content structure) + * 2. New 3-column layout with tabs and TOC (for InfluxDB 3 Core/Enterprise) + * + * Run with: + * node cypress/support/run-e2e-specs.js --spec "cypress/e2e/content/api-reference.cy.js" content/influxdb3/core/reference/api/_index.md + */ + const fakeGoogleTagManager = { trackingOptIn: () => {}, trackingOptOut: () => {} @@ -64,7 +76,8 @@ describe('API reference content', () => { it(`has API info`, function () { cy.get('script[data-user-analytics-fingerprint-enabled=false]').should('have.length', 1); cy.get('h1').first().should('have.length', 1); - cy.get('[data-role$=description]').should('have.length', 1); + // Check for description element (either article--description class or data-role attribute) + cy.get('.article--description, [data-role$=description]').should('have.length.at.least', 1); }); it('links back to the version home page', function () { cy.get('a.back').contains('Docs') @@ -109,3 +122,144 @@ describe('API reference content', () => { }); }); }); + +/** + * 3-Column API Reference Layout Tests + * Tests the new layout for InfluxDB 3 Core/Enterprise API documentation + * Tests individual API endpoint pages which use the 3-column layout with tabs + */ +describe('API reference 3-column layout', () => { + // Individual API endpoint pages (not index pages) have the 
3-column layout + const layoutSubjects = [ + '/influxdb3/core/api/v3/engine/', + '/influxdb3/enterprise/api/v3/engine/', + ]; + + layoutSubjects.forEach((subject) => { + describe(`${subject} layout`, () => { + beforeEach(() => { + cy.intercept('GET', '**', (req) => { + req.continue((res) => { + if (res.headers['content-type']?.includes('text/html')) { + res.body = res.body.replace( + /data-user-analytics-fingerprint-enabled="true"/, + 'data-user-analytics-fingerprint-enabled="false"' + ); + } + }); + }); + cy.visit(subject); + }); + + describe('Layout Structure', () => { + it('displays sidebar', () => { + cy.get('.sidebar').should('be.visible'); + }); + + it('displays API content area', () => { + cy.get('.api-content, .content-wrapper').should('exist'); + }); + + it('displays TOC on desktop viewport', () => { + cy.viewport(1280, 800); + cy.get('.api-toc').should('be.visible'); + }); + + it('hides TOC on mobile viewport', () => { + cy.viewport(375, 667); + cy.get('.api-toc').should('not.be.visible'); + }); + }); + + describe('API Navigation', () => { + it('displays API navigation section', () => { + cy.get('.api-nav').should('exist'); + }); + + it('has collapsible navigation groups', () => { + cy.get('.api-nav-group').should('have.length.at.least', 1); + }); + + it('toggles group expand/collapse', () => { + cy.get('.api-nav-group-header').first().as('header'); + cy.get('@header').click(); + cy.get('@header') + .should('have.attr', 'aria-expanded') + .and('match', /true|false/); + }); + }); + + describe('Tab Navigation', () => { + it('displays tabs', () => { + cy.get('.api-tabs-wrapper').should('exist'); + }); + + it('shows Operations tab content by default', () => { + cy.get('[data-tab-panel="operations"]').should('be.visible'); + }); + + it('switches tabs on click without page jump', () => { + // Get initial scroll position + cy.window().then((win) => { + const initialScroll = win.scrollY; + + // Click the second tab + cy.get('.api-tabs-nav a').eq(1).click(); + + 
// Verify tabs are still visible (not jumped away) + cy.get('.api-tabs-wrapper').should('be.visible'); + + // Verify the clicked tab is now active + cy.get('.api-tabs-nav a').eq(1).should('have.class', 'is-active'); + + // Verify the first tab is no longer active + cy.get('.api-tabs-nav a').eq(0).should('not.have.class', 'is-active'); + }); + }); + + it('updates URL hash when switching tabs', () => { + cy.get('.api-tabs-nav a[data-tab="server"]').click(); + cy.url().should('include', '#server'); + }); + + it('restores tab from URL hash on page load', () => { + // Use the current subject URL with hash instead of hardcoded old reference URL + cy.visit(`${subject}#authentication`); + cy.get('.api-tabs-nav a[data-tab="authentication"]').should('have.class', 'is-active'); + cy.get('[data-tab-panel="authentication"]').should('be.visible'); + }); + }); + + describe('Table of Contents', () => { + it('displays TOC header', () => { + cy.viewport(1280, 800); + cy.get('.api-toc-header').should('contain', 'ON THIS PAGE'); + }); + + it('generates TOC from headings', () => { + cy.viewport(1280, 800); + cy.wait(500); // Wait for component initialization + cy.get('.api-toc-nav').should('exist'); + }); + }); + + describe('API Renderer', () => { + it('loads API documentation renderer', () => { + cy.get('.api-reference-container, rapi-doc, .api-reference-wrapper').should('exist'); + }); + + it('displays spec download link', () => { + cy.get('.api-spec-download').should('exist'); + }); + }); + + describe('Accessibility', () => { + it('has ARIA attributes on nav groups', () => { + cy.get('.api-nav-group-header').each(($header) => { + cy.wrap($header).should('have.attr', 'aria-expanded'); + }); + }); + }); + }); + }); +}); From 593309c2b40d31adee55ead3293560531eb9254e Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:08:33 -0600 Subject: [PATCH 13/51] chore: Rebuild API generator scripts and update tests --- .../scripts/dist/generate-openapi-articles.js | 880 
+++++++------- .../dist/openapi-paths-to-hugo-data/index.js | 1046 +++++++++-------- api-docs/scripts/generate-openapi-articles.ts | 15 +- .../openapi-paths-to-hugo-data/index.ts | 36 +- cypress/e2e/content/api-reference.cy.js | 68 +- 5 files changed, 1107 insertions(+), 938 deletions(-) diff --git a/api-docs/scripts/dist/generate-openapi-articles.js b/api-docs/scripts/dist/generate-openapi-articles.js index 8fc31252b3..7ed7c26c4f 100644 --- a/api-docs/scripts/dist/generate-openapi-articles.js +++ b/api-docs/scripts/dist/generate-openapi-articles.js @@ -1,5 +1,5 @@ #!/usr/bin/env node -"use strict"; +'use strict'; /** * Generate OpenAPI Articles Script * @@ -20,47 +20,70 @@ * * @module generate-openapi-articles */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ('get' in desc ? 
!m.__esModule : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? function (o, v) { + Object.defineProperty(o, 'default', { enumerable: true, value: v }); + } + : function (o, v) { + o['default'] = v; + }); +var __importStar = + (this && this.__importStar) || + (function () { + var ownKeys = function (o) { + ownKeys = + Object.getOwnPropertyNames || + function (o) { + var ar = []; + for (var k in o) + if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; }; - return ownKeys(o); + return ownKeys(o); }; return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) + for (var k = ownKeys(mod), i = 0; i < k.length; i++) + if (k[i] !== 'default') __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); + })(); +Object.defineProperty(exports, '__esModule', { value: true }); exports.productConfigs = void 0; exports.processProduct = processProduct; exports.generateDataFromOpenAPI = generateDataFromOpenAPI; exports.generatePagesFromArticleData = generatePagesFromArticleData; -const child_process_1 = require("child_process"); -const path = __importStar(require("path")); -const fs = __importStar(require("fs")); +const child_process_1 = require('child_process'); +const path = __importStar(require('path')); +const fs = __importStar(require('fs')); // Import the OpenAPI to Hugo converter 
const openapiPathsToHugo = require('./openapi-paths-to-hugo-data/index.js'); // Calculate the relative paths @@ -74,20 +97,19 @@ const API_DOCS_ROOT = 'api-docs'; * @throws Exits process with code 1 on error */ function execCommand(command, description) { - try { - if (description) { - console.log(`\n${description}...`); - } - console.log(`Executing: ${command}\n`); - (0, child_process_1.execSync)(command, { stdio: 'inherit' }); + try { + if (description) { + console.log(`\n${description}...`); } - catch (error) { - console.error(`\n❌ Error executing command: ${command}`); - if (error instanceof Error) { - console.error(error.message); - } - process.exit(1); + console.log(`Executing: ${command}\n`); + (0, child_process_1.execSync)(command, { stdio: 'inherit' }); + } catch (error) { + console.error(`\n❌ Error executing command: ${command}`); + if (error instanceof Error) { + console.error(error.message); } + process.exit(1); + } } /** * Generate Hugo data files from OpenAPI specification @@ -97,14 +119,14 @@ function execCommand(command, description) { * @param articleOutPath - Output path for article metadata */ function generateDataFromOpenAPI(specFile, dataOutPath, articleOutPath) { - if (!fs.existsSync(dataOutPath)) { - fs.mkdirSync(dataOutPath, { recursive: true }); - } - openapiPathsToHugo.generateHugoData({ - dataOutPath, - articleOutPath, - specFile, - }); + if (!fs.existsSync(dataOutPath)) { + fs.mkdirSync(dataOutPath, { recursive: true }); + } + openapiPathsToHugo.generateHugoData({ + dataOutPath, + articleOutPath, + specFile, + }); } /** * Generate Hugo content pages from article data @@ -115,107 +137,122 @@ function generateDataFromOpenAPI(specFile, dataOutPath, articleOutPath) { * @param options - Generation options */ function generatePagesFromArticleData(options) { - const { articlesPath, contentPath, menuKey, menuParent, productDescription, skipParentMenu, } = options; - const yaml = require('js-yaml'); - const articlesFile = path.join(articlesPath, 
'articles.yml'); - if (!fs.existsSync(articlesFile)) { - console.warn(`⚠️ Articles file not found: ${articlesFile}`); - return; - } - // Read articles data - const articlesContent = fs.readFileSync(articlesFile, 'utf8'); - const data = yaml.load(articlesContent); - if (!data.articles || !Array.isArray(data.articles)) { - console.warn(`⚠️ No articles found in ${articlesFile}`); - return; - } - // Ensure content directory exists - if (!fs.existsSync(contentPath)) { - fs.mkdirSync(contentPath, { recursive: true }); + const { + articlesPath, + contentPath, + menuKey, + menuParent, + productDescription, + skipParentMenu, + } = options; + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: ${articlesFile}`); + return; + } + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent); + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + // Determine the API parent directory from the first article's path + // e.g., if article path is "api/v1/health", the API root is "api" + const firstArticlePath = data.articles[0]?.path || ''; + const apiRootDir = firstArticlePath.split('/')[0]; + // Generate parent _index.md for the API section + if (apiRootDir) { + const apiParentDir = path.join(contentPath, apiRootDir); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); } - // Determine the API parent directory from the first article's path - // e.g., if article path is "api/v1/health", the API root is "api" - const firstArticlePath = data.articles[0]?.path || ''; - const apiRootDir = 
firstArticlePath.split('/')[0]; - // Generate parent _index.md for the API section - if (apiRootDir) { - const apiParentDir = path.join(contentPath, apiRootDir); - const parentIndexFile = path.join(apiParentDir, '_index.md'); - if (!fs.existsSync(apiParentDir)) { - fs.mkdirSync(apiParentDir, { recursive: true }); - } - if (!fs.existsSync(parentIndexFile)) { - const parentFrontmatter = { - title: menuParent || 'HTTP API', - description: productDescription || - 'API reference documentation for all available endpoints.', - weight: 104, - }; - // Add menu entry for parent page (unless skipParentMenu is true) - if (menuKey && !skipParentMenu) { - parentFrontmatter.menu = { - [menuKey]: { - name: menuParent || 'HTTP API', - }, - }; - } - const parentContent = `--- + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter = { + title: menuParent || 'HTTP API', + description: + productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; + } + const parentContent = `--- ${yaml.dump(parentFrontmatter)}--- `; - fs.writeFileSync(parentIndexFile, parentContent); - console.log(`✓ Generated parent index at ${parentIndexFile}`); - } + fs.writeFileSync(parentIndexFile, parentContent); + console.log(`✓ Generated parent index at ${parentIndexFile}`); } - // Generate a page for each article - for (const article of data.articles) { - const pagePath = path.join(contentPath, article.path); - const pageFile = path.join(pagePath, '_index.md'); - // Create directory if needed - if (!fs.existsSync(pagePath)) { - fs.mkdirSync(pagePath, { recursive: true }); - } - // Build frontmatter object - // Use menuName for display (actual endpoint path like /health) - // Fall back to name or path if menuName is not set - const displayName = 
article.fields.menuName || article.fields.name || article.path; - const frontmatter = { - title: displayName, - description: `API reference for ${displayName}`, - type: 'api', - // Use explicit layout to override Hugo's default section template lookup - // (Hugo's section lookup ignores `type`, so we need `layout` for the 3-column API layout) - layout: 'list', - staticFilePath: article.fields.staticFilePath, - weight: 100, - }; - // Add menu entry if menuKey is provided - // Use menuName for menu display (shows actual endpoint path like /health) - if (menuKey) { - frontmatter.menu = { - [menuKey]: { - name: displayName, - ...(menuParent && { parent: menuParent }), - }, - }; - } - // Add related links if present in article fields - if (article.fields.related && - Array.isArray(article.fields.related) && - article.fields.related.length > 0) { - frontmatter.related = article.fields.related; - } - // Add OpenAPI tags if present in article fields (for frontmatter metadata) - if (article.fields.apiTags && - Array.isArray(article.fields.apiTags) && - article.fields.apiTags.length > 0) { - frontmatter.api_tags = article.fields.apiTags; - } - const pageContent = `--- + } + // Generate a page for each article + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); + } + // Build frontmatter object + // Use menuName for display (actual endpoint path like /health) + // Fall back to name or path if menuName is not set + const displayName = + article.fields.menuName || article.fields.name || article.path; + const frontmatter = { + title: displayName, + description: `API reference for ${displayName}`, + type: 'api', + // Use explicit layout to override Hugo's default section template lookup + // (Hugo's section lookup ignores `type`, so we need `layout` for the 3-column API 
layout) + layout: 'list', + staticFilePath: article.fields.staticFilePath, + weight: 100, + }; + // Add menu entry if menuKey is provided + // Use menuName for menu display (shows actual endpoint path like /health) + if (menuKey) { + frontmatter.menu = { + [menuKey]: { + name: displayName, + ...(menuParent && { parent: menuParent }), + }, + }; + } + // Add related links if present in article fields + if ( + article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0 + ) { + frontmatter.related = article.fields.related; + } + // Add OpenAPI tags if present in article fields (for frontmatter metadata) + if ( + article.fields.apiTags && + Array.isArray(article.fields.apiTags) && + article.fields.apiTags.length > 0 + ) { + frontmatter.api_tags = article.fields.apiTags; + } + const pageContent = `--- ${yaml.dump(frontmatter)}--- `; - fs.writeFileSync(pageFile, pageContent); - } - console.log(`✓ Generated ${data.articles.length} content pages in ${contentPath}`); + fs.writeFileSync(pageFile, pageContent); + } + console.log( + `✓ Generated ${data.articles.length} content pages in ${contentPath}` + ); } /** * Generate Hugo content pages from tag-based article data @@ -227,98 +264,114 @@ ${yaml.dump(frontmatter)}--- * @param options - Generation options */ function generateTagPagesFromArticleData(options) { - const { articlesPath, contentPath, menuKey, menuParent, productDescription, skipParentMenu, } = options; - const yaml = require('js-yaml'); - const articlesFile = path.join(articlesPath, 'articles.yml'); - if (!fs.existsSync(articlesFile)) { - console.warn(`⚠️ Articles file not found: ${articlesFile}`); - return; + const { + articlesPath, + contentPath, + menuKey, + menuParent, + productDescription, + skipParentMenu, + } = options; + const yaml = require('js-yaml'); + const articlesFile = path.join(articlesPath, 'articles.yml'); + if (!fs.existsSync(articlesFile)) { + console.warn(`⚠️ Articles file not found: 
${articlesFile}`); + return; + } + // Read articles data + const articlesContent = fs.readFileSync(articlesFile, 'utf8'); + const data = yaml.load(articlesContent); + if (!data.articles || !Array.isArray(data.articles)) { + console.warn(`⚠️ No articles found in ${articlesFile}`); + return; + } + // Ensure content directory exists + if (!fs.existsSync(contentPath)) { + fs.mkdirSync(contentPath, { recursive: true }); + } + // Generate parent _index.md for the API section + const apiParentDir = path.join(contentPath, 'api'); + const parentIndexFile = path.join(apiParentDir, '_index.md'); + if (!fs.existsSync(apiParentDir)) { + fs.mkdirSync(apiParentDir, { recursive: true }); + } + if (!fs.existsSync(parentIndexFile)) { + const parentFrontmatter = { + title: menuParent || 'HTTP API', + description: + productDescription || + 'API reference documentation for all available endpoints.', + weight: 104, + }; + // Add menu entry for parent page (unless skipParentMenu is true) + if (menuKey && !skipParentMenu) { + parentFrontmatter.menu = { + [menuKey]: { + name: menuParent || 'HTTP API', + }, + }; } - // Read articles data - const articlesContent = fs.readFileSync(articlesFile, 'utf8'); - const data = yaml.load(articlesContent); - if (!data.articles || !Array.isArray(data.articles)) { - console.warn(`⚠️ No articles found in ${articlesFile}`); - return; + const parentContent = `--- +${yaml.dump(parentFrontmatter)}--- +`; + fs.writeFileSync(parentIndexFile, parentContent); + console.log(`✓ Generated parent index at ${parentIndexFile}`); + } + // Generate a page for each article (tag) + for (const article of data.articles) { + const pagePath = path.join(contentPath, article.path); + const pageFile = path.join(pagePath, '_index.md'); + // Create directory if needed + if (!fs.existsSync(pagePath)) { + fs.mkdirSync(pagePath, { recursive: true }); } - // Ensure content directory exists - if (!fs.existsSync(contentPath)) { - fs.mkdirSync(contentPath, { recursive: true }); + // Build 
frontmatter object + const title = article.fields.title || article.fields.name || article.path; + const isConceptual = article.fields.isConceptual === true; + const frontmatter = { + title, + description: article.fields.description || `API reference for ${title}`, + type: 'api', + layout: isConceptual ? 'single' : 'list', + staticFilePath: article.fields.staticFilePath, + weight: 100, + // Tag-based fields + tag: article.fields.tag, + isConceptual, + menuGroup: article.fields.menuGroup, + }; + // Add operations for TOC generation (only for non-conceptual pages) + if ( + !isConceptual && + article.fields.operations && + article.fields.operations.length > 0 + ) { + frontmatter.operations = article.fields.operations; } - // Generate parent _index.md for the API section - const apiParentDir = path.join(contentPath, 'api'); - const parentIndexFile = path.join(apiParentDir, '_index.md'); - if (!fs.existsSync(apiParentDir)) { - fs.mkdirSync(apiParentDir, { recursive: true }); + // Add tag description for conceptual pages + if (isConceptual && article.fields.tagDescription) { + frontmatter.tagDescription = article.fields.tagDescription; } - if (!fs.existsSync(parentIndexFile)) { - const parentFrontmatter = { - title: menuParent || 'HTTP API', - description: productDescription || - 'API reference documentation for all available endpoints.', - weight: 104, - }; - // Add menu entry for parent page (unless skipParentMenu is true) - if (menuKey && !skipParentMenu) { - parentFrontmatter.menu = { - [menuKey]: { - name: menuParent || 'HTTP API', - }, - }; - } - const parentContent = `--- -${yaml.dump(parentFrontmatter)}--- -`; - fs.writeFileSync(parentIndexFile, parentContent); - console.log(`✓ Generated parent index at ${parentIndexFile}`); + // Note: We deliberately don't add menu entries for tag-based API pages. 
+ // The API sidebar navigation (api/sidebar-nav.html) handles navigation + // for API reference pages, avoiding conflicts with existing menu items + // like "Query data" and "Write data" that exist in the main sidebar. + // Add related links if present in article fields + if ( + article.fields.related && + Array.isArray(article.fields.related) && + article.fields.related.length > 0 + ) { + frontmatter.related = article.fields.related; } - // Generate a page for each article (tag) - for (const article of data.articles) { - const pagePath = path.join(contentPath, article.path); - const pageFile = path.join(pagePath, '_index.md'); - // Create directory if needed - if (!fs.existsSync(pagePath)) { - fs.mkdirSync(pagePath, { recursive: true }); - } - // Build frontmatter object - const title = article.fields.title || article.fields.name || article.path; - const isConceptual = article.fields.isConceptual === true; - const frontmatter = { - title, - description: article.fields.description || `API reference for ${title}`, - type: 'api', - layout: isConceptual ? 'single' : 'list', - staticFilePath: article.fields.staticFilePath, - weight: 100, - // Tag-based fields - tag: article.fields.tag, - isConceptual, - menuGroup: article.fields.menuGroup, - }; - // Add operations for TOC generation (only for non-conceptual pages) - if (!isConceptual && article.fields.operations && article.fields.operations.length > 0) { - frontmatter.operations = article.fields.operations; - } - // Add tag description for conceptual pages - if (isConceptual && article.fields.tagDescription) { - frontmatter.tagDescription = article.fields.tagDescription; - } - // Note: We deliberately don't add menu entries for tag-based API pages. - // The API sidebar navigation (api/sidebar-nav.html) handles navigation - // for API reference pages, avoiding conflicts with existing menu items - // like "Query data" and "Write data" that exist in the main sidebar. 
- // Add related links if present in article fields - if (article.fields.related && - Array.isArray(article.fields.related) && - article.fields.related.length > 0) { - frontmatter.related = article.fields.related; - } - const pageContent = `--- + const pageContent = `--- ${yaml.dump(frontmatter)}--- `; - fs.writeFileSync(pageFile, pageContent); - } - console.log(`✓ Generated ${data.articles.length} tag-based content pages in ${contentPath}`); + fs.writeFileSync(pageFile, pageContent); + } + console.log( + `✓ Generated ${data.articles.length} tag-based content pages in ${contentPath}` + ); } /** * Product configurations for all InfluxDB editions @@ -326,61 +379,70 @@ ${yaml.dump(frontmatter)}--- * Maps product identifiers to their OpenAPI specs and content directories */ const productConfigs = { - // TODO: v2 products (cloud-v2, oss-v2) are disabled for now because they - // have existing Redoc-based API reference at /reference/api/ - // Uncomment when ready to migrate v2 products to Scalar - // 'cloud-v2': { - // specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), - // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api'), - // description: 'InfluxDB Cloud (v2 API)', - // menuKey: 'influxdb_cloud', - // }, - // 'oss-v2': { - // specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), - // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api'), - // description: 'InfluxDB OSS v2', - // menuKey: 'influxdb_v2', - // }, - // InfluxDB 3 products use tag-based generation for better UX - 'influxdb3-core': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core'), - description: 'InfluxDB 3 Core', - menuKey: 'influxdb3_core', - useTagBasedGeneration: true, - }, - 'influxdb3-enterprise': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/enterprise'), - description: 'InfluxDB 3 Enterprise', - menuKey: 
'influxdb3_enterprise', - useTagBasedGeneration: true, - }, - // Note: Cloud Dedicated, Serverless, and Clustered use management APIs - // with paths like /accounts/{accountId}/... so we put them under /api/ - // These products have existing /reference/api/ pages with menu entries, - // so we skip adding menu entries to the generated parent pages. - 'cloud-dedicated': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/cloud-dedicated/management/openapi.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), - description: 'InfluxDB Cloud Dedicated', - menuKey: 'influxdb3_cloud_dedicated', - skipParentMenu: true, - }, - 'cloud-serverless': { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/cloud-serverless/management/openapi.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), - description: 'InfluxDB Cloud Serverless', - menuKey: 'influxdb3_cloud_serverless', - skipParentMenu: true, - }, - clustered: { - specFile: path.join(API_DOCS_ROOT, 'influxdb3/clustered/management/openapi.yml'), - pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), - description: 'InfluxDB Clustered', - menuKey: 'influxdb3_clustered', - skipParentMenu: true, - }, + // TODO: v2 products (cloud-v2, oss-v2) are disabled for now because they + // have existing Redoc-based API reference at /reference/api/ + // Uncomment when ready to migrate v2 products to Scalar + // 'cloud-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/cloud/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/cloud/api'), + // description: 'InfluxDB Cloud (v2 API)', + // menuKey: 'influxdb_cloud', + // }, + // 'oss-v2': { + // specFile: path.join(API_DOCS_ROOT, 'influxdb/v2/v2/ref.yml'), + // pagesDir: path.join(DOCS_ROOT, 'content/influxdb/v2/api'), + // description: 'InfluxDB OSS v2', + // menuKey: 'influxdb_v2', + // }, + // InfluxDB 3 products use tag-based generation for better UX + 'influxdb3-core': { + specFile: 
path.join(API_DOCS_ROOT, 'influxdb3/core/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/core'), + description: 'InfluxDB 3 Core', + menuKey: 'influxdb3_core', + useTagBasedGeneration: true, + }, + 'influxdb3-enterprise': { + specFile: path.join(API_DOCS_ROOT, 'influxdb3/enterprise/v3/ref.yml'), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/enterprise'), + description: 'InfluxDB 3 Enterprise', + menuKey: 'influxdb3_enterprise', + useTagBasedGeneration: true, + }, + // Note: Cloud Dedicated, Serverless, and Clustered use management APIs + // with paths like /accounts/{accountId}/... so we put them under /api/ + // These products have existing /reference/api/ pages with menu entries, + // so we skip adding menu entries to the generated parent pages. + 'cloud-dedicated': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-dedicated/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-dedicated/api'), + description: 'InfluxDB Cloud Dedicated', + menuKey: 'influxdb3_cloud_dedicated', + skipParentMenu: true, + }, + 'cloud-serverless': { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/cloud-serverless/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/cloud-serverless/api'), + description: 'InfluxDB Cloud Serverless', + menuKey: 'influxdb3_cloud_serverless', + skipParentMenu: true, + }, + clustered: { + specFile: path.join( + API_DOCS_ROOT, + 'influxdb3/clustered/management/openapi.yml' + ), + pagesDir: path.join(DOCS_ROOT, 'content/influxdb3/clustered/api'), + description: 'InfluxDB Clustered', + menuKey: 'influxdb3_clustered', + skipParentMenu: true, + }, }; exports.productConfigs = productConfigs; /** @@ -390,127 +452,145 @@ exports.productConfigs = productConfigs; * @param config - Product configuration */ function processProduct(productKey, config) { - console.log('\n' + '='.repeat(80)); - console.log(`Processing ${config.description || productKey}`); - 
console.log('='.repeat(80)); - const staticPath = path.join(DOCS_ROOT, 'static/openapi'); - const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); - const staticJsonSpecPath = path.join(staticPath, `influxdb-${productKey}.json`); - const staticPathsPath = path.join(staticPath, `influxdb-${productKey}/paths`); - const articlesPath = path.join(DOCS_ROOT, `data/article-data/influxdb/${productKey}`); - // Check if spec file exists - if (!fs.existsSync(config.specFile)) { - console.warn(`⚠️ Spec file not found: ${config.specFile}`); - console.log('Skipping this product. Run getswagger.sh first if needed.\n'); - return; + console.log('\n' + '='.repeat(80)); + console.log(`Processing ${config.description || productKey}`); + console.log('='.repeat(80)); + const staticPath = path.join(DOCS_ROOT, 'static/openapi'); + const staticSpecPath = path.join(staticPath, `influxdb-${productKey}.yml`); + const staticJsonSpecPath = path.join( + staticPath, + `influxdb-${productKey}.json` + ); + const staticPathsPath = path.join(staticPath, `influxdb-${productKey}/paths`); + const articlesPath = path.join( + DOCS_ROOT, + `data/article-data/influxdb/${productKey}` + ); + // Check if spec file exists + if (!fs.existsSync(config.specFile)) { + console.warn(`⚠️ Spec file not found: ${config.specFile}`); + console.log('Skipping this product. 
Run getswagger.sh first if needed.\n'); + return; + } + try { + // Step 1: Execute the getswagger.sh script to fetch/bundle the spec + // Note: getswagger.sh must run from api-docs/ because it uses relative paths + const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); + if (fs.existsSync(getswaggerScript)) { + execCommand( + `cd ${API_DOCS_ROOT} && ./getswagger.sh ${productKey} -B`, + `Fetching OpenAPI spec for ${productKey}` + ); + } else { + console.log(`⚠️ getswagger.sh not found, skipping fetch step`); } - try { - // Step 1: Execute the getswagger.sh script to fetch/bundle the spec - // Note: getswagger.sh must run from api-docs/ because it uses relative paths - const getswaggerScript = path.join(API_DOCS_ROOT, 'getswagger.sh'); - if (fs.existsSync(getswaggerScript)) { - execCommand(`cd ${API_DOCS_ROOT} && ./getswagger.sh ${productKey} -B`, `Fetching OpenAPI spec for ${productKey}`); - } - else { - console.log(`⚠️ getswagger.sh not found, skipping fetch step`); - } - // Step 2: Ensure static directory exists - if (!fs.existsSync(staticPath)) { - fs.mkdirSync(staticPath, { recursive: true }); - } - // Step 3: Copy the generated OpenAPI spec to static folder (YAML) - if (fs.existsSync(config.specFile)) { - fs.copyFileSync(config.specFile, staticSpecPath); - console.log(`✓ Copied spec to ${staticSpecPath}`); - // Step 4: Generate JSON version of the spec - try { - const yaml = require('js-yaml'); - const specContent = fs.readFileSync(config.specFile, 'utf8'); - const specObject = yaml.load(specContent); - fs.writeFileSync(staticJsonSpecPath, JSON.stringify(specObject, null, 2)); - console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); - } - catch (jsonError) { - console.warn(`⚠️ Could not generate JSON spec: ${jsonError}`); - } - } - // Step 5: Generate Hugo data from OpenAPI spec - if (config.useTagBasedGeneration) { - // Tag-based generation: group operations by OpenAPI tag - const staticTagsPath = path.join(staticPath, 
`influxdb-${productKey}/tags`); - console.log(`\n📋 Using tag-based generation for ${productKey}...`); - openapiPathsToHugo.generateHugoDataByTag({ - specFile: config.specFile, - dataOutPath: staticTagsPath, - articleOutPath: articlesPath, - includePaths: true, // Also generate path-based files for backwards compatibility - }); - // Step 6: Generate Hugo content pages from tag-based article data - generateTagPagesFromArticleData({ - articlesPath, - contentPath: config.pagesDir, - menuKey: config.menuKey, - menuParent: 'InfluxDB HTTP API', - skipParentMenu: config.skipParentMenu, - }); - } - else { - // Path-based generation: group paths by URL prefix (legacy) - generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); - // Step 6: Generate Hugo content pages from path-based article data - generatePagesFromArticleData({ - articlesPath, - contentPath: config.pagesDir, - menuKey: config.menuKey, - menuParent: 'InfluxDB HTTP API', - skipParentMenu: config.skipParentMenu, - }); - } - console.log(`\n✅ Successfully processed ${config.description || productKey}\n`); + // Step 2: Ensure static directory exists + if (!fs.existsSync(staticPath)) { + fs.mkdirSync(staticPath, { recursive: true }); + } + // Step 3: Copy the generated OpenAPI spec to static folder (YAML) + if (fs.existsSync(config.specFile)) { + fs.copyFileSync(config.specFile, staticSpecPath); + console.log(`✓ Copied spec to ${staticSpecPath}`); + // Step 4: Generate JSON version of the spec + try { + const yaml = require('js-yaml'); + const specContent = fs.readFileSync(config.specFile, 'utf8'); + const specObject = yaml.load(specContent); + fs.writeFileSync( + staticJsonSpecPath, + JSON.stringify(specObject, null, 2) + ); + console.log(`✓ Generated JSON spec at ${staticJsonSpecPath}`); + } catch (jsonError) { + console.warn(`⚠️ Could not generate JSON spec: ${jsonError}`); + } } - catch (error) { - console.error(`\n❌ Error processing ${productKey}:`, error); - process.exit(1); + // Step 5: 
Generate Hugo data from OpenAPI spec + if (config.useTagBasedGeneration) { + // Tag-based generation: group operations by OpenAPI tag + const staticTagsPath = path.join( + staticPath, + `influxdb-${productKey}/tags` + ); + console.log(`\n📋 Using tag-based generation for ${productKey}...`); + openapiPathsToHugo.generateHugoDataByTag({ + specFile: config.specFile, + dataOutPath: staticTagsPath, + articleOutPath: articlesPath, + includePaths: true, // Also generate path-based files for backwards compatibility + }); + // Step 6: Generate Hugo content pages from tag-based article data + generateTagPagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); + } else { + // Path-based generation: group paths by URL prefix (legacy) + generateDataFromOpenAPI(config.specFile, staticPathsPath, articlesPath); + // Step 6: Generate Hugo content pages from path-based article data + generatePagesFromArticleData({ + articlesPath, + contentPath: config.pagesDir, + menuKey: config.menuKey, + menuParent: 'InfluxDB HTTP API', + skipParentMenu: config.skipParentMenu, + }); } + console.log( + `\n✅ Successfully processed ${config.description || productKey}\n` + ); + } catch (error) { + console.error(`\n❌ Error processing ${productKey}:`, error); + process.exit(1); + } } /** * Main execution function */ function main() { - const args = process.argv.slice(2); - // Determine which products to process - let productsToProcess; - if (args.length === 0) { - // No arguments: process all products - productsToProcess = Object.keys(productConfigs); - console.log('\n📋 Processing all products...\n'); - } - else { - // Arguments provided: process only specified products - productsToProcess = args; - console.log(`\n📋 Processing specified products: ${productsToProcess.join(', ')}\n`); - } - // Validate product keys - const invalidProducts = productsToProcess.filter((key) => 
!productConfigs[key]); - if (invalidProducts.length > 0) { - console.error(`\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}`); - console.error('\nValid products:'); - Object.keys(productConfigs).forEach((key) => { - console.error(` - ${key}: ${productConfigs[key].description}`); - }); - process.exit(1); - } - // Process each product - productsToProcess.forEach((productKey) => { - const config = productConfigs[productKey]; - processProduct(productKey, config); + const args = process.argv.slice(2); + // Determine which products to process + let productsToProcess; + if (args.length === 0) { + // No arguments: process all products + productsToProcess = Object.keys(productConfigs); + console.log('\n📋 Processing all products...\n'); + } else { + // Arguments provided: process only specified products + productsToProcess = args; + console.log( + `\n📋 Processing specified products: ${productsToProcess.join(', ')}\n` + ); + } + // Validate product keys + const invalidProducts = productsToProcess.filter( + (key) => !productConfigs[key] + ); + if (invalidProducts.length > 0) { + console.error( + `\n❌ Invalid product identifier(s): ${invalidProducts.join(', ')}` + ); + console.error('\nValid products:'); + Object.keys(productConfigs).forEach((key) => { + console.error(` - ${key}: ${productConfigs[key].description}`); }); - console.log('\n' + '='.repeat(80)); - console.log('✅ All products processed successfully!'); - console.log('='.repeat(80) + '\n'); + process.exit(1); + } + // Process each product + productsToProcess.forEach((productKey) => { + const config = productConfigs[productKey]; + processProduct(productKey, config); + }); + console.log('\n' + '='.repeat(80)); + console.log('✅ All products processed successfully!'); + console.log('='.repeat(80) + '\n'); } // Execute if run directly if (require.main === module) { - main(); + main(); } -//# sourceMappingURL=generate-openapi-articles.js.map \ No newline at end of file +//# 
sourceMappingURL=generate-openapi-articles.js.map diff --git a/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js index cd977cfce3..b81d170304 100644 --- a/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js +++ b/api-docs/scripts/dist/openapi-paths-to-hugo-data/index.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; /** * OpenAPI to Hugo Data Converter * @@ -7,45 +7,68 @@ * * @module openapi-paths-to-hugo-data */ -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ('get' in desc ? 
!m.__esModule : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? function (o, v) { + Object.defineProperty(o, 'default', { enumerable: true, value: v }); + } + : function (o, v) { + o['default'] = v; + }); +var __importStar = + (this && this.__importStar) || + (function () { + var ownKeys = function (o) { + ownKeys = + Object.getOwnPropertyNames || + function (o) { + var ar = []; + for (var k in o) + if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; }; - return ownKeys(o); + return ownKeys(o); }; return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) + for (var k = ownKeys(mod), i = 0; i < k.length; i++) + if (k[i] !== 'default') __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); + })(); +Object.defineProperty(exports, '__esModule', { value: true }); exports.generateHugoDataByTag = generateHugoDataByTag; exports.generateHugoData = generateHugoData; -const yaml = __importStar(require("js-yaml")); -const fs = __importStar(require("fs")); -const path = __importStar(require("path")); +const yaml = __importStar(require('js-yaml')); +const fs = __importStar(require('fs')); +const path = __importStar(require('path')); /** * Read a YAML file and parse it * @@ -54,8 +77,8 @@ const path = __importStar(require("path")); * @returns Parsed YAML content */ function 
readFile(filepath, encoding = 'utf8') { - const content = fs.readFileSync(filepath, encoding); - return yaml.load(content); + const content = fs.readFileSync(filepath, encoding); + return yaml.load(content); } /** * Write data to a YAML file @@ -64,7 +87,7 @@ function readFile(filepath, encoding = 'utf8') { * @param outputTo - Output file path */ function writeDataFile(data, outputTo) { - fs.writeFileSync(outputTo, yaml.dump(data)); + fs.writeFileSync(outputTo, yaml.dump(data)); } /** * Write data to a JSON file @@ -73,22 +96,22 @@ function writeDataFile(data, outputTo) { * @param outputTo - Output file path */ function writeJsonFile(data, outputTo) { - fs.writeFileSync(outputTo, JSON.stringify(data, null, 2)); + fs.writeFileSync(outputTo, JSON.stringify(data, null, 2)); } /** * OpenAPI utility functions */ const openapiUtils = { - /** - * Check if a path fragment is a placeholder (e.g., {id}) - * - * @param str - Path fragment to check - * @returns True if the fragment is a placeholder - */ - isPlaceholderFragment(str) { - const placeholderRegex = /^\{.*\}$/; - return placeholderRegex.test(str); - }, + /** + * Check if a path fragment is a placeholder (e.g., {id}) + * + * @param str - Path fragment to check + * @returns True if the fragment is a placeholder + */ + isPlaceholderFragment(str) { + const placeholderRegex = /^\{.*\}$/; + return placeholderRegex.test(str); + }, }; /** * Convert tag name to URL-friendly slug @@ -97,35 +120,35 @@ const openapiUtils = { * @returns URL-friendly slug (e.g., "write-data", "processing-engine") */ function slugifyTag(tagName) { - return tagName - .toLowerCase() - .replace(/[^a-z0-9]+/g, '-') - .replace(/^-|-$/g, ''); + return tagName + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, ''); } /** * Menu group mappings for tag-based navigation * Maps OpenAPI tags to sidebar groups */ const TAG_MENU_GROUPS = { - // Concepts group - 'Quick start': 'Concepts', - 'Authentication': 'Concepts', - 'Headers and 
parameters': 'Concepts', - 'Response codes': 'Concepts', - // Data Operations group - 'Write data': 'Data Operations', - 'Query data': 'Data Operations', - 'Cache data': 'Data Operations', - // Administration group - 'Database': 'Administration', - 'Table': 'Administration', - 'Token': 'Administration', - // Processing Engine group - 'Processing engine': 'Processing Engine', - // Server group - 'Server information': 'Server', - // Compatibility group - 'Compatibility endpoints': 'Compatibility', + // Concepts group + 'Quick start': 'Concepts', + Authentication: 'Concepts', + 'Headers and parameters': 'Concepts', + 'Response codes': 'Concepts', + // Data Operations group + 'Write data': 'Data Operations', + 'Query data': 'Data Operations', + 'Cache data': 'Data Operations', + // Administration group + Database: 'Administration', + Table: 'Administration', + Token: 'Administration', + // Processing Engine group + 'Processing engine': 'Processing Engine', + // Server group + 'Server information': 'Server', + // Compatibility group + 'Compatibility endpoints': 'Compatibility', }; /** * Get menu group for a tag @@ -134,20 +157,20 @@ const TAG_MENU_GROUPS = { * @returns Menu group name or 'Other' if not mapped */ function getMenuGroupForTag(tagName) { - return TAG_MENU_GROUPS[tagName] || 'Other'; + return TAG_MENU_GROUPS[tagName] || 'Other'; } /** * HTTP methods to check for operations */ const HTTP_METHODS = [ - 'get', - 'post', - 'put', - 'patch', - 'delete', - 'options', - 'head', - 'trace', + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', ]; /** * Extract all operations from an OpenAPI document grouped by tag @@ -156,29 +179,29 @@ const HTTP_METHODS = [ * @returns Map of tag name to operations with that tag */ function extractOperationsByTag(openapi) { - const tagOperations = new Map(); - Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { - HTTP_METHODS.forEach((method) => { - const operation = pathItem[method]; - if 
(operation) { - const opMeta = { - operationId: operation.operationId || `${method}-${pathKey}`, - method: method.toUpperCase(), - path: pathKey, - summary: operation.summary || '', - tags: operation.tags || [], - }; - // Add operation to each of its tags - (operation.tags || []).forEach((tag) => { - if (!tagOperations.has(tag)) { - tagOperations.set(tag, []); - } - tagOperations.get(tag).push(opMeta); - }); - } + const tagOperations = new Map(); + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + const opMeta = { + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }; + // Add operation to each of its tags + (operation.tags || []).forEach((tag) => { + if (!tagOperations.has(tag)) { + tagOperations.set(tag, []); + } + tagOperations.get(tag).push(opMeta); }); + } }); - return tagOperations; + }); + return tagOperations; } /** * Write OpenAPI specs grouped by tag to separate files @@ -189,79 +212,81 @@ function extractOperationsByTag(openapi) { * @param outPath - Output directory path */ function writeTagOpenapis(openapi, prefix, outPath) { - const tagOperations = extractOperationsByTag(openapi); - // Process each tag - tagOperations.forEach((operations, tagName) => { - // Deep copy openapi - const doc = JSON.parse(JSON.stringify(openapi)); - // Filter paths to only include those with operations for this tag - const filteredPaths = {}; - Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { - const filteredPathItem = {}; - let hasOperations = false; - HTTP_METHODS.forEach((method) => { - const operation = pathItem[method]; - if (operation?.tags?.includes(tagName)) { - filteredPathItem[method] = operation; - hasOperations = true; - } - }); - // Include path-level parameters if we have operations - if (hasOperations) { - if 
(pathItem.parameters) { - filteredPathItem.parameters = pathItem.parameters; - } - filteredPaths[pathKey] = filteredPathItem; - } - }); - doc.paths = filteredPaths; - // Filter tags to only include this tag (and trait tags for context) - if (doc.tags) { - doc.tags = doc.tags.filter((tag) => tag.name === tagName || tag['x-traitTag']); - } - // Update info - const tagSlug = slugifyTag(tagName); - doc.info.title = tagName; - doc.info.description = `API reference for ${tagName}`; - doc['x-tagGroup'] = tagName; - try { - if (!fs.existsSync(outPath)) { - fs.mkdirSync(outPath, { recursive: true }); - } - const baseFilename = `${prefix}${tagSlug}`; - const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); - const jsonPath = path.resolve(outPath, `${baseFilename}.json`); - writeDataFile(doc, yamlPath); - writeJsonFile(doc, jsonPath); - console.log(`Generated tag spec: ${baseFilename}.yaml (${Object.keys(filteredPaths).length} paths, ${operations.length} operations)`); - } - catch (err) { - console.error(`Error writing tag group ${tagName}:`, err); + const tagOperations = extractOperationsByTag(openapi); + // Process each tag + tagOperations.forEach((operations, tagName) => { + // Deep copy openapi + const doc = JSON.parse(JSON.stringify(openapi)); + // Filter paths to only include those with operations for this tag + const filteredPaths = {}; + Object.entries(openapi.paths).forEach(([pathKey, pathItem]) => { + const filteredPathItem = {}; + let hasOperations = false; + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation?.tags?.includes(tagName)) { + filteredPathItem[method] = operation; + hasOperations = true; } - }); - // Also create specs for conceptual tags (x-traitTag) without operations - (openapi.tags || []).forEach((tag) => { - if (tag['x-traitTag'] && !tagOperations.has(tag.name)) { - const doc = JSON.parse(JSON.stringify(openapi)); - doc.paths = {}; - doc.tags = [tag]; - doc.info.title = tag.name; - doc.info.description 
= tag.description || `API reference for ${tag.name}`; - doc['x-tagGroup'] = tag.name; - const tagSlug = slugifyTag(tag.name); - try { - const baseFilename = `${prefix}${tagSlug}`; - const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); - const jsonPath = path.resolve(outPath, `${baseFilename}.json`); - writeDataFile(doc, yamlPath); - writeJsonFile(doc, jsonPath); - console.log(`Generated conceptual tag spec: ${baseFilename}.yaml`); - } - catch (err) { - console.error(`Error writing conceptual tag ${tag.name}:`, err); - } + }); + // Include path-level parameters if we have operations + if (hasOperations) { + if (pathItem.parameters) { + filteredPathItem.parameters = pathItem.parameters; } + filteredPaths[pathKey] = filteredPathItem; + } }); + doc.paths = filteredPaths; + // Filter tags to only include this tag (and trait tags for context) + if (doc.tags) { + doc.tags = doc.tags.filter( + (tag) => tag.name === tagName || tag['x-traitTag'] + ); + } + // Update info + const tagSlug = slugifyTag(tagName); + doc.info.title = tagName; + doc.info.description = `API reference for ${tagName}`; + doc['x-tagGroup'] = tagName; + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log( + `Generated tag spec: ${baseFilename}.yaml (${Object.keys(filteredPaths).length} paths, ${operations.length} operations)` + ); + } catch (err) { + console.error(`Error writing tag group ${tagName}:`, err); + } + }); + // Also create specs for conceptual tags (x-traitTag) without operations + (openapi.tags || []).forEach((tag) => { + if (tag['x-traitTag'] && !tagOperations.has(tag.name)) { + const doc = JSON.parse(JSON.stringify(openapi)); + doc.paths = {}; + doc.tags = [tag]; + doc.info.title = 
tag.name; + doc.info.description = tag.description || `API reference for ${tag.name}`; + doc['x-tagGroup'] = tag.name; + const tagSlug = slugifyTag(tag.name); + try { + const baseFilename = `${prefix}${tagSlug}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated conceptual tag spec: ${baseFilename}.yaml`); + } catch (err) { + console.error(`Error writing conceptual tag ${tag.name}:`, err); + } + } + }); } /** * Write OpenAPI specs grouped by path to separate files @@ -272,79 +297,80 @@ function writeTagOpenapis(openapi, prefix, outPath) { * @param outPath - Output directory path */ function writePathOpenapis(openapi, prefix, outPath) { - const pathGroups = {}; - // Group paths by their base path (first 3-4 segments, excluding placeholders) - Object.keys(openapi.paths) - .sort() - .forEach((p) => { - const delimiter = '/'; - let key = p.split(delimiter); - // Check if this is an item path (ends with a placeholder) - let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); - if (isItemPath) { - key = key.slice(0, -1); - } - // Take first 4 segments - key = key.slice(0, 4); - // Check if the last segment is still a placeholder - isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); - if (isItemPath) { - key = key.slice(0, -1); - } - const groupKey = key.join('/'); - pathGroups[groupKey] = pathGroups[groupKey] || {}; - pathGroups[groupKey][p] = openapi.paths[p]; + const pathGroups = {}; + // Group paths by their base path (first 3-4 segments, excluding placeholders) + Object.keys(openapi.paths) + .sort() + .forEach((p) => { + const delimiter = '/'; + let key = p.split(delimiter); + // Check if this is an item path (ends with a placeholder) + let isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + // 
Take first 4 segments + key = key.slice(0, 4); + // Check if the last segment is still a placeholder + isItemPath = openapiUtils.isPlaceholderFragment(key[key.length - 1]); + if (isItemPath) { + key = key.slice(0, -1); + } + const groupKey = key.join('/'); + pathGroups[groupKey] = pathGroups[groupKey] || {}; + pathGroups[groupKey][p] = openapi.paths[p]; }); - // Write each path group to separate YAML and JSON files - Object.keys(pathGroups).forEach((pg) => { - // Deep copy openapi - const doc = JSON.parse(JSON.stringify(openapi)); - doc.paths = pathGroups[pg]; - // Collect tags used by operations in this path group - const usedTags = new Set(); - Object.values(doc.paths).forEach((pathItem) => { - const httpMethods = [ - 'get', - 'post', - 'put', - 'patch', - 'delete', - 'options', - 'head', - 'trace', - ]; - httpMethods.forEach((method) => { - const operation = pathItem[method]; - if (operation?.tags) { - operation.tags.forEach((tag) => usedTags.add(tag)); - } - }); - }); - // Filter tags to only include those used by operations in this path group - // Exclude x-traitTag tags (supplementary documentation tags) - if (doc.tags) { - doc.tags = doc.tags.filter((tag) => usedTags.has(tag.name) && !tag['x-traitTag']); - } - // Simplify info for path-specific docs - doc.info.title = pg; - doc.info.description = `API reference for ${pg}`; - doc['x-pathGroup'] = pg; - try { - if (!fs.existsSync(outPath)) { - fs.mkdirSync(outPath, { recursive: true }); - } - const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; - const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); - const jsonPath = path.resolve(outPath, `${baseFilename}.json`); - // Write both YAML and JSON versions - writeDataFile(doc, yamlPath); - writeJsonFile(doc, jsonPath); - console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); - } - catch (err) { - console.error(`Error writing path group ${pg}:`, err); + // Write each path group to separate YAML and JSON files + 
Object.keys(pathGroups).forEach((pg) => { + // Deep copy openapi + const doc = JSON.parse(JSON.stringify(openapi)); + doc.paths = pathGroups[pg]; + // Collect tags used by operations in this path group + const usedTags = new Set(); + Object.values(doc.paths).forEach((pathItem) => { + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + httpMethods.forEach((method) => { + const operation = pathItem[method]; + if (operation?.tags) { + operation.tags.forEach((tag) => usedTags.add(tag)); } + }); }); + // Filter tags to only include those used by operations in this path group + // Exclude x-traitTag tags (supplementary documentation tags) + if (doc.tags) { + doc.tags = doc.tags.filter( + (tag) => usedTags.has(tag.name) && !tag['x-traitTag'] + ); + } + // Simplify info for path-specific docs + doc.info.title = pg; + doc.info.description = `API reference for ${pg}`; + doc['x-pathGroup'] = pg; + try { + if (!fs.existsSync(outPath)) { + fs.mkdirSync(outPath, { recursive: true }); + } + const baseFilename = `${prefix}${pg.replaceAll('/', '-').replace(/^-/, '')}`; + const yamlPath = path.resolve(outPath, `${baseFilename}.yaml`); + const jsonPath = path.resolve(outPath, `${baseFilename}.json`); + // Write both YAML and JSON versions + writeDataFile(doc, yamlPath); + writeJsonFile(doc, jsonPath); + console.log(`Generated: ${baseFilename}.yaml and ${baseFilename}.json`); + } catch (err) { + console.error(`Error writing path group ${pg}:`, err); + } + }); } /** * Create article metadata for a path group @@ -353,107 +379,119 @@ function writePathOpenapis(openapi, prefix, outPath) { * @returns Article metadata object */ function createArticleDataForPathGroup(openapi) { - const article = { - path: '', - fields: { - name: openapi['x-pathGroup'] || '', - describes: Object.keys(openapi.paths), - }, - }; - /** - * Convert OpenAPI path to Hugo-friendly article path - * Legacy endpoints (without /api/ prefix) go under api/ 
directly - * Versioned endpoints (with /api/vN/) keep their structure - * - * @param p - Path to convert (e.g., '/health', '/api/v3/query_sql') - * @returns Path suitable for Hugo content directory (e.g., 'api/health', 'api/v3/query_sql') - */ - const toHugoPath = (p) => { - if (!p) { - return ''; - } - // If path doesn't start with /api/, it's a legacy endpoint - // Place it directly under api/ to avoid collision with /api/v1/* paths - if (!p.startsWith('/api/')) { - // /health -> api/health - // /write -> api/write - return `api${p}`; - } - // /api/v1/health -> api/v1/health - // /api/v2/write -> api/v2/write - // /api/v3/query_sql -> api/v3/query_sql - return p.replace(/^\//, ''); - }; - /** - * Convert path to tag-friendly format (dashes instead of slashes) - * - * @param p - Path to convert - * @returns Tag-friendly path - */ - const toTagPath = (p) => { - if (!p) { - return ''; - } - return p.replace(/^\//, '').replaceAll('/', '-'); - }; - const pathGroup = openapi['x-pathGroup'] || ''; - article.path = toHugoPath(pathGroup); - // Store original path for menu display (shows actual endpoint path) - article.fields.menuName = pathGroup; - article.fields.title = openapi.info?.title; - article.fields.description = openapi.description; - const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); - article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] - .filter(Boolean) - .map((t) => toTagPath(t)); - // Extract x-relatedLinks and OpenAPI tags from path items or operations - const relatedLinks = []; - const apiTags = []; - const httpMethods = [ - 'get', - 'post', - 'put', - 'patch', - 'delete', - 'options', - 'head', - 'trace', - ]; - Object.values(openapi.paths).forEach((pathItem) => { - // Check path-level x-relatedLinks - if (pathItem['x-relatedLinks'] && - Array.isArray(pathItem['x-relatedLinks'])) { - relatedLinks.push(...pathItem['x-relatedLinks'].filter((link) => !relatedLinks.includes(link))); + const article = { + path: '', + fields: { + 
name: openapi['x-pathGroup'] || '', + describes: Object.keys(openapi.paths), + }, + }; + /** + * Convert OpenAPI path to Hugo-friendly article path + * Legacy endpoints (without /api/ prefix) go under api/ directly + * Versioned endpoints (with /api/vN/) keep their structure + * + * @param p - Path to convert (e.g., '/health', '/api/v3/query_sql') + * @returns Path suitable for Hugo content directory (e.g., 'api/health', 'api/v3/query_sql') + */ + const toHugoPath = (p) => { + if (!p) { + return ''; + } + // If path doesn't start with /api/, it's a legacy endpoint + // Place it directly under api/ to avoid collision with /api/v1/* paths + if (!p.startsWith('/api/')) { + // /health -> api/health + // /write -> api/write + return `api${p}`; + } + // /api/v1/health -> api/v1/health + // /api/v2/write -> api/v2/write + // /api/v3/query_sql -> api/v3/query_sql + return p.replace(/^\//, ''); + }; + /** + * Convert path to tag-friendly format (dashes instead of slashes) + * + * @param p - Path to convert + * @returns Tag-friendly path + */ + const toTagPath = (p) => { + if (!p) { + return ''; + } + return p.replace(/^\//, '').replaceAll('/', '-'); + }; + const pathGroup = openapi['x-pathGroup'] || ''; + article.path = toHugoPath(pathGroup); + // Store original path for menu display (shows actual endpoint path) + article.fields.menuName = pathGroup; + article.fields.title = openapi.info?.title; + article.fields.description = openapi.description; + const pathGroupFrags = path.parse(openapi['x-pathGroup'] || ''); + article.fields.tags = [pathGroupFrags?.dir, pathGroupFrags?.name] + .filter(Boolean) + .map((t) => toTagPath(t)); + // Extract x-relatedLinks and OpenAPI tags from path items or operations + const relatedLinks = []; + const apiTags = []; + const httpMethods = [ + 'get', + 'post', + 'put', + 'patch', + 'delete', + 'options', + 'head', + 'trace', + ]; + Object.values(openapi.paths).forEach((pathItem) => { + // Check path-level x-relatedLinks + if ( + 
pathItem['x-relatedLinks'] && + Array.isArray(pathItem['x-relatedLinks']) + ) { + relatedLinks.push( + ...pathItem['x-relatedLinks'].filter( + (link) => !relatedLinks.includes(link) + ) + ); + } + // Check operation-level x-relatedLinks and tags + httpMethods.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + // Extract x-relatedLinks + if ( + operation['x-relatedLinks'] && + Array.isArray(operation['x-relatedLinks']) + ) { + relatedLinks.push( + ...operation['x-relatedLinks'].filter( + (link) => !relatedLinks.includes(link) + ) + ); } - // Check operation-level x-relatedLinks and tags - httpMethods.forEach((method) => { - const operation = pathItem[method]; - if (operation) { - // Extract x-relatedLinks - if (operation['x-relatedLinks'] && - Array.isArray(operation['x-relatedLinks'])) { - relatedLinks.push(...operation['x-relatedLinks'].filter((link) => !relatedLinks.includes(link))); - } - // Extract OpenAPI tags from operation - if (operation.tags && Array.isArray(operation.tags)) { - operation.tags.forEach((tag) => { - if (!apiTags.includes(tag)) { - apiTags.push(tag); - } - }); - } + // Extract OpenAPI tags from operation + if (operation.tags && Array.isArray(operation.tags)) { + operation.tags.forEach((tag) => { + if (!apiTags.includes(tag)) { + apiTags.push(tag); } - }); + }); + } + } }); - // Only add related if there are links - if (relatedLinks.length > 0) { - article.fields.related = relatedLinks; - } - // Add OpenAPI tags from operations (for Hugo frontmatter) - if (apiTags.length > 0) { - article.fields.apiTags = apiTags; - } - return article; + }); + // Only add related if there are links + if (relatedLinks.length > 0) { + article.fields.related = relatedLinks; + } + // Add OpenAPI tags from operations (for Hugo frontmatter) + if (apiTags.length > 0) { + article.fields.apiTags = apiTags; + } + return article; } /** * Write OpenAPI article metadata to Hugo data files @@ -464,49 +502,50 @@ function 
createArticleDataForPathGroup(openapi) { * @param opts - Options including file pattern filter */ function writeOpenapiArticleData(sourcePath, targetPath, opts) { - /** - * Check if path is a file - */ - const isFile = (filePath) => { - return fs.lstatSync(filePath).isFile(); - }; - /** - * Check if filename matches pattern - */ - const matchesPattern = (filePath) => { - return opts.filePattern - ? path.parse(filePath).name.startsWith(opts.filePattern) - : true; - }; - try { - const articles = fs - .readdirSync(sourcePath) - .map((fileName) => path.join(sourcePath, fileName)) - .filter(matchesPattern) - .filter(isFile) - .filter((filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml')) // Only process YAML files - .map((filePath) => { - const openapi = readFile(filePath); - const article = createArticleDataForPathGroup(openapi); - article.fields.source = filePath; - // Hugo omits "/static" from the URI when serving files stored in "./static" - article.fields.staticFilePath = filePath.replace(/^static\//, '/'); - return article; - }); - if (!fs.existsSync(targetPath)) { - fs.mkdirSync(targetPath, { recursive: true }); - } - const articleCollection = { articles }; - // Write both YAML and JSON versions - const yamlPath = path.resolve(targetPath, 'articles.yml'); - const jsonPath = path.resolve(targetPath, 'articles.json'); - writeDataFile(articleCollection, yamlPath); - writeJsonFile(articleCollection, jsonPath); - console.log(`Generated ${articles.length} articles in ${targetPath}`); - } - catch (e) { - console.error('Error writing article data:', e); + /** + * Check if path is a file + */ + const isFile = (filePath) => { + return fs.lstatSync(filePath).isFile(); + }; + /** + * Check if filename matches pattern + */ + const matchesPattern = (filePath) => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter( + (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') + ) // Only process YAML files + .map((filePath) => { + const openapi = readFile(filePath); + const article = createArticleDataForPathGroup(openapi); + article.fields.source = filePath; + // Hugo omits "/static" from the URI when serving files stored in "./static" + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); } + const articleCollection = { articles }; + // Write both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + console.log(`Generated ${articles.length} articles in ${targetPath}`); + } catch (e) { + console.error('Error writing article data:', e); + } } /** * Create article data for a tag-based grouping @@ -517,33 +556,36 @@ function writeOpenapiArticleData(sourcePath, targetPath, opts) { * @returns Article metadata object */ function createArticleDataForTag(openapi, operations, tagMeta) { - const tagName = openapi['x-tagGroup'] || ''; - const tagSlug = slugifyTag(tagName); - const isConceptual = tagMeta?.['x-traitTag'] === true; - const article = { - path: `api/${tagSlug}`, - fields: { - name: tagName, - describes: Object.keys(openapi.paths), - title: tagName, - description: tagMeta?.description || openapi.info?.description || `API reference for ${tagName}`, - tag: tagName, - isConceptual, - menuGroup: getMenuGroupForTag(tagName), - operations: operations.map((op) => ({ - operationId: op.operationId, - method: op.method, - path: op.path, - 
summary: op.summary, - tags: op.tags, - })), - }, - }; - // Add tag description for conceptual pages - if (tagMeta?.description) { - article.fields.tagDescription = tagMeta.description; - } - return article; + const tagName = openapi['x-tagGroup'] || ''; + const tagSlug = slugifyTag(tagName); + const isConceptual = tagMeta?.['x-traitTag'] === true; + const article = { + path: `api/${tagSlug}`, + fields: { + name: tagName, + describes: Object.keys(openapi.paths), + title: tagName, + description: + tagMeta?.description || + openapi.info?.description || + `API reference for ${tagName}`, + tag: tagName, + isConceptual, + menuGroup: getMenuGroupForTag(tagName), + operations: operations.map((op) => ({ + operationId: op.operationId, + method: op.method, + path: op.path, + summary: op.summary, + tags: op.tags, + })), + }, + }; + // Add tag description for conceptual pages + if (tagMeta?.description) { + article.fields.tagDescription = tagMeta.description; + } + return article; } /** * Write tag-based OpenAPI article metadata to Hugo data files @@ -555,65 +597,73 @@ function createArticleDataForTag(openapi, operations, tagMeta) { * @param opts - Options including file pattern filter */ function writeOpenapiTagArticleData(sourcePath, targetPath, openapi, opts) { - const isFile = (filePath) => { - return fs.lstatSync(filePath).isFile(); - }; - const matchesPattern = (filePath) => { - return opts.filePattern - ? 
path.parse(filePath).name.startsWith(opts.filePattern) - : true; - }; - // Create tag metadata lookup - const tagMetaMap = new Map(); - (openapi.tags || []).forEach((tag) => { - tagMetaMap.set(tag.name, tag); - }); - try { - const articles = fs - .readdirSync(sourcePath) - .map((fileName) => path.join(sourcePath, fileName)) - .filter(matchesPattern) - .filter(isFile) - .filter((filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml')) - .map((filePath) => { - const tagOpenapi = readFile(filePath); - const tagName = tagOpenapi['x-tagGroup'] || tagOpenapi.info?.title || ''; - const tagMeta = tagMetaMap.get(tagName); - // Extract operations from the tag-filtered spec - const operations = []; - Object.entries(tagOpenapi.paths).forEach(([pathKey, pathItem]) => { - HTTP_METHODS.forEach((method) => { - const operation = pathItem[method]; - if (operation) { - operations.push({ - operationId: operation.operationId || `${method}-${pathKey}`, - method: method.toUpperCase(), - path: pathKey, - summary: operation.summary || '', - tags: operation.tags || [], - }); - } - }); - }); - const article = createArticleDataForTag(tagOpenapi, operations, tagMeta); - article.fields.source = filePath; - article.fields.staticFilePath = filePath.replace(/^static\//, '/'); - return article; + const isFile = (filePath) => { + return fs.lstatSync(filePath).isFile(); + }; + const matchesPattern = (filePath) => { + return opts.filePattern + ? 
path.parse(filePath).name.startsWith(opts.filePattern) + : true; + }; + // Create tag metadata lookup + const tagMetaMap = new Map(); + (openapi.tags || []).forEach((tag) => { + tagMetaMap.set(tag.name, tag); + }); + try { + const articles = fs + .readdirSync(sourcePath) + .map((fileName) => path.join(sourcePath, fileName)) + .filter(matchesPattern) + .filter(isFile) + .filter( + (filePath) => filePath.endsWith('.yaml') || filePath.endsWith('.yml') + ) + .map((filePath) => { + const tagOpenapi = readFile(filePath); + const tagName = + tagOpenapi['x-tagGroup'] || tagOpenapi.info?.title || ''; + const tagMeta = tagMetaMap.get(tagName); + // Extract operations from the tag-filtered spec + const operations = []; + Object.entries(tagOpenapi.paths).forEach(([pathKey, pathItem]) => { + HTTP_METHODS.forEach((method) => { + const operation = pathItem[method]; + if (operation) { + operations.push({ + operationId: operation.operationId || `${method}-${pathKey}`, + method: method.toUpperCase(), + path: pathKey, + summary: operation.summary || '', + tags: operation.tags || [], + }); + } + }); }); - if (!fs.existsSync(targetPath)) { - fs.mkdirSync(targetPath, { recursive: true }); - } - const articleCollection = { articles }; - // Write both YAML and JSON versions - const yamlPath = path.resolve(targetPath, 'articles.yml'); - const jsonPath = path.resolve(targetPath, 'articles.json'); - writeDataFile(articleCollection, yamlPath); - writeJsonFile(articleCollection, jsonPath); - console.log(`Generated ${articles.length} tag-based articles in ${targetPath}`); - } - catch (e) { - console.error('Error writing tag article data:', e); + const article = createArticleDataForTag( + tagOpenapi, + operations, + tagMeta + ); + article.fields.source = filePath; + article.fields.staticFilePath = filePath.replace(/^static\//, '/'); + return article; + }); + if (!fs.existsSync(targetPath)) { + fs.mkdirSync(targetPath, { recursive: true }); } + const articleCollection = { articles }; + // Write 
both YAML and JSON versions + const yamlPath = path.resolve(targetPath, 'articles.yml'); + const jsonPath = path.resolve(targetPath, 'articles.json'); + writeDataFile(articleCollection, yamlPath); + writeJsonFile(articleCollection, jsonPath); + console.log( + `Generated ${articles.length} tag-based articles in ${targetPath}` + ); + } catch (e) { + console.error('Error writing tag article data:', e); + } } /** * Generate Hugo data files from an OpenAPI specification grouped by tag @@ -627,24 +677,28 @@ function writeOpenapiTagArticleData(sourcePath, targetPath, openapi, opts) { * @param options - Generation options */ function generateHugoDataByTag(options) { - const filenamePrefix = `${path.parse(options.specFile).name}-`; - const sourceFile = readFile(options.specFile, 'utf8'); - // Optionally generate path-based files for backwards compatibility - if (options.includePaths) { - console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); - writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); - } - // Generate tag-based files - const tagOutPath = options.includePaths - ? 
path.join(options.dataOutPath, 'tags') - : options.dataOutPath; - console.log(`\nGenerating OpenAPI tag files in ${tagOutPath}....`); - writeTagOpenapis(sourceFile, filenamePrefix, tagOutPath); - console.log(`\nGenerating OpenAPI tag article data in ${options.articleOutPath}...`); - writeOpenapiTagArticleData(tagOutPath, options.articleOutPath, sourceFile, { - filePattern: filenamePrefix, - }); - console.log('\nTag-based generation complete!\n'); + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = readFile(options.specFile, 'utf8'); + // Optionally generate path-based files for backwards compatibility + if (options.includePaths) { + console.log( + `\nGenerating OpenAPI path files in ${options.dataOutPath}....` + ); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + } + // Generate tag-based files + const tagOutPath = options.includePaths + ? path.join(options.dataOutPath, 'tags') + : options.dataOutPath; + console.log(`\nGenerating OpenAPI tag files in ${tagOutPath}....`); + writeTagOpenapis(sourceFile, filenamePrefix, tagOutPath); + console.log( + `\nGenerating OpenAPI tag article data in ${options.articleOutPath}...` + ); + writeOpenapiTagArticleData(tagOutPath, options.articleOutPath, sourceFile, { + filePattern: filenamePrefix, + }); + console.log('\nTag-based generation complete!\n'); } /** * Generate Hugo data files from an OpenAPI specification @@ -658,19 +712,21 @@ function generateHugoDataByTag(options) { * @param options - Generation options */ function generateHugoData(options) { - const filenamePrefix = `${path.parse(options.specFile).name}-`; - const sourceFile = readFile(options.specFile, 'utf8'); - console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); - writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); - console.log(`\nGenerating OpenAPI article data in ${options.articleOutPath}...`); - writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, 
{ - filePattern: filenamePrefix, - }); - console.log('\nGeneration complete!\n'); + const filenamePrefix = `${path.parse(options.specFile).name}-`; + const sourceFile = readFile(options.specFile, 'utf8'); + console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); + console.log( + `\nGenerating OpenAPI article data in ${options.articleOutPath}...` + ); + writeOpenapiArticleData(options.dataOutPath, options.articleOutPath, { + filePattern: filenamePrefix, + }); + console.log('\nGeneration complete!\n'); } // CommonJS export for backward compatibility module.exports = { - generateHugoData, - generateHugoDataByTag, + generateHugoData, + generateHugoDataByTag, }; -//# sourceMappingURL=index.js.map \ No newline at end of file +//# sourceMappingURL=index.js.map diff --git a/api-docs/scripts/generate-openapi-articles.ts b/api-docs/scripts/generate-openapi-articles.ts index a0b83af027..317910de62 100644 --- a/api-docs/scripts/generate-openapi-articles.ts +++ b/api-docs/scripts/generate-openapi-articles.ts @@ -308,7 +308,9 @@ interface GenerateTagPagesOptions { * * @param options - Generation options */ -function generateTagPagesFromArticleData(options: GenerateTagPagesOptions): void { +function generateTagPagesFromArticleData( + options: GenerateTagPagesOptions +): void { const { articlesPath, contentPath, @@ -417,7 +419,11 @@ ${yaml.dump(parentFrontmatter)}--- }; // Add operations for TOC generation (only for non-conceptual pages) - if (!isConceptual && article.fields.operations && article.fields.operations.length > 0) { + if ( + !isConceptual && + article.fields.operations && + article.fields.operations.length > 0 + ) { frontmatter.operations = article.fields.operations; } @@ -595,7 +601,10 @@ function processProduct(productKey: string, config: ProductConfig): void { // Step 5: Generate Hugo data from OpenAPI spec if (config.useTagBasedGeneration) { // Tag-based generation: group 
operations by OpenAPI tag - const staticTagsPath = path.join(staticPath, `influxdb-${productKey}/tags`); + const staticTagsPath = path.join( + staticPath, + `influxdb-${productKey}/tags` + ); console.log(`\n📋 Using tag-based generation for ${productKey}...`); openapiPathsToHugo.generateHugoDataByTag({ specFile: config.specFile, diff --git a/api-docs/scripts/openapi-paths-to-hugo-data/index.ts b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts index 3d0d0f27ff..7e7187768b 100644 --- a/api-docs/scripts/openapi-paths-to-hugo-data/index.ts +++ b/api-docs/scripts/openapi-paths-to-hugo-data/index.ts @@ -366,7 +366,7 @@ function slugifyTag(tagName: string): string { const TAG_MENU_GROUPS: Record = { // Concepts group 'Quick start': 'Concepts', - 'Authentication': 'Concepts', + Authentication: 'Concepts', 'Headers and parameters': 'Concepts', 'Response codes': 'Concepts', // Data Operations group @@ -374,9 +374,9 @@ const TAG_MENU_GROUPS: Record = { 'Query data': 'Data Operations', 'Cache data': 'Data Operations', // Administration group - 'Database': 'Administration', - 'Table': 'Administration', - 'Token': 'Administration', + Database: 'Administration', + Table: 'Administration', + Token: 'Administration', // Processing Engine group 'Processing engine': 'Processing Engine', // Server group @@ -865,7 +865,7 @@ function createArticleDataForTag( operations: OperationMeta[], tagMeta?: Tag ): Article { - const tagName = openapi['x-tagGroup'] as string || ''; + const tagName = (openapi['x-tagGroup'] as string) || ''; const tagSlug = slugifyTag(tagName); const isConceptual = tagMeta?.['x-traitTag'] === true; @@ -875,7 +875,10 @@ function createArticleDataForTag( name: tagName, describes: Object.keys(openapi.paths), title: tagName, - description: tagMeta?.description || openapi.info?.description || `API reference for ${tagName}`, + description: + tagMeta?.description || + openapi.info?.description || + `API reference for ${tagName}`, tag: tagName, isConceptual, menuGroup: 
getMenuGroupForTag(tagName), @@ -939,7 +942,8 @@ function writeOpenapiTagArticleData( ) .map((filePath) => { const tagOpenapi = readFile(filePath); - const tagName = tagOpenapi['x-tagGroup'] as string || tagOpenapi.info?.title || ''; + const tagName = + (tagOpenapi['x-tagGroup'] as string) || tagOpenapi.info?.title || ''; const tagMeta = tagMetaMap.get(tagName); // Extract operations from the tag-filtered spec @@ -959,7 +963,11 @@ function writeOpenapiTagArticleData( }); }); - const article = createArticleDataForTag(tagOpenapi, operations, tagMeta); + const article = createArticleDataForTag( + tagOpenapi, + operations, + tagMeta + ); article.fields.source = filePath; article.fields.staticFilePath = filePath.replace(/^static\//, '/'); return article; @@ -978,7 +986,9 @@ function writeOpenapiTagArticleData( writeDataFile(articleCollection, yamlPath); writeJsonFile(articleCollection, jsonPath); - console.log(`Generated ${articles.length} tag-based articles in ${targetPath}`); + console.log( + `Generated ${articles.length} tag-based articles in ${targetPath}` + ); } catch (e) { console.error('Error writing tag article data:', e); } @@ -1003,13 +1013,17 @@ export interface GenerateHugoDataByTagOptions extends GenerateHugoDataOptions { * * @param options - Generation options */ -export function generateHugoDataByTag(options: GenerateHugoDataByTagOptions): void { +export function generateHugoDataByTag( + options: GenerateHugoDataByTagOptions +): void { const filenamePrefix = `${path.parse(options.specFile).name}-`; const sourceFile = readFile(options.specFile, 'utf8'); // Optionally generate path-based files for backwards compatibility if (options.includePaths) { - console.log(`\nGenerating OpenAPI path files in ${options.dataOutPath}....`); + console.log( + `\nGenerating OpenAPI path files in ${options.dataOutPath}....` + ); writePathOpenapis(sourceFile, filenamePrefix, options.dataOutPath); } diff --git a/cypress/e2e/content/api-reference.cy.js 
b/cypress/e2e/content/api-reference.cy.js index e2798563c4..85a4ce957d 100644 --- a/cypress/e2e/content/api-reference.cy.js +++ b/cypress/e2e/content/api-reference.cy.js @@ -13,8 +13,8 @@ const fakeGoogleTagManager = { trackingOptIn: () => {}, - trackingOptOut: () => {} -} + trackingOptOut: () => {}, +}; describe('API reference content', () => { const subjects = [ @@ -49,43 +49,47 @@ describe('API reference content', () => { '/influxdb3/enterprise/api/', ]; - subjects.forEach((subject) => { describe(subject, () => { beforeEach(() => { - // Intercept and modify the page HTML before it loads - cy.intercept('GET', '**', (req) => { - req.continue((res) => { - if (res.headers['content-type']?.includes('text/html')) { - // Modify the Kapa widget script attributes - // Avoid socket errors from fpjs in tests by disabling fingerprinting - res.body = res.body.replace( - /data-user-analytics-fingerprint-enabled="true"/, - 'data-user-analytics-fingerprint-enabled="false"' - ); - } - }); - }); + // Intercept and modify the page HTML before it loads + cy.intercept('GET', '**', (req) => { + req.continue((res) => { + if (res.headers['content-type']?.includes('text/html')) { + // Modify the Kapa widget script attributes + // Avoid socket errors from fpjs in tests by disabling fingerprinting + res.body = res.body.replace( + /data-user-analytics-fingerprint-enabled="true"/, + 'data-user-analytics-fingerprint-enabled="false"' + ); + } + }); + }); cy.visit(subject); - window.fcdsc = fakeGoogleTagManager; cy.stub(window.fcdsc, 'trackingOptIn').as('trackingOptIn'); cy.stub(window.fcdsc, 'trackingOptOut').as('trackingOptOut'); }); it(`has API info`, function () { - cy.get('script[data-user-analytics-fingerprint-enabled=false]').should('have.length', 1); + cy.get('script[data-user-analytics-fingerprint-enabled=false]').should( + 'have.length', + 1 + ); cy.get('h1').first().should('have.length', 1); // Check for description element (either article--description class or data-role attribute) 
- cy.get('.article--description, [data-role$=description]').should('have.length.at.least', 1); + cy.get('.article--description, [data-role$=description]').should( + 'have.length.at.least', + 1 + ); }); it('links back to the version home page', function () { - cy.get('a.back').contains('Docs') - .should('have.length', 1) - .click(); + cy.get('a.back').contains('Docs').should('have.length', 1).click(); // Path should be the first two segments and trailing slash in $subject - cy.location('pathname') - .should('eq', subject.replace(/^(\/[^/]+\/[^/]+\/).*/, '$1')); + cy.location('pathname').should( + 'eq', + subject.replace(/^(\/[^/]+\/[^/]+\/).*/, '$1') + ); cy.get('h1').should('have.length', 1); }); it('contains valid internal links', function () { @@ -101,8 +105,7 @@ describe('API reference content', () => { // cy.request doesn't show in your browser's Developer Tools // because the request comes from Node, not from the browser. cy.request($a.attr('href')).its('status').should('eq', 200); - }); - + }); }); }); it('contains valid external links', function () { @@ -213,7 +216,9 @@ describe('API reference 3-column layout', () => { cy.get('.api-tabs-nav a').eq(1).should('have.class', 'is-active'); // Verify the first tab is no longer active - cy.get('.api-tabs-nav a').eq(0).should('not.have.class', 'is-active'); + cy.get('.api-tabs-nav a') + .eq(0) + .should('not.have.class', 'is-active'); }); }); @@ -225,7 +230,10 @@ describe('API reference 3-column layout', () => { it('restores tab from URL hash on page load', () => { // Use the current subject URL with hash instead of hardcoded old reference URL cy.visit(`${subject}#authentication`); - cy.get('.api-tabs-nav a[data-tab="authentication"]').should('have.class', 'is-active'); + cy.get('.api-tabs-nav a[data-tab="authentication"]').should( + 'have.class', + 'is-active' + ); cy.get('[data-tab-panel="authentication"]').should('be.visible'); }); }); @@ -245,7 +253,9 @@ describe('API reference 3-column layout', () => { 
describe('API Renderer', () => { it('loads API documentation renderer', () => { - cy.get('.api-reference-container, rapi-doc, .api-reference-wrapper').should('exist'); + cy.get( + '.api-reference-container, rapi-doc, .api-reference-wrapper' + ).should('exist'); }); it('displays spec download link', () => { From b87f4979c6509318658f2bc68684f799b1e610ef Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:34:25 -0600 Subject: [PATCH 14/51] fix(api): Extract first sentence for header summary, add Overview section - Header summary now shows only the first sentence from description using regex extraction with fallback to first line for descriptions without sentence-ending punctuation - Added Overview section with full description after endpoints list --- layouts/api/list.html | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/layouts/api/list.html b/layouts/api/list.html index 9be024de96..47f5589a8f 100644 --- a/layouts/api/list.html +++ b/layouts/api/list.html @@ -31,13 +31,21 @@

{{ .Title }}

- {{/* Summary - brief description at top */}} + {{/* Summary - first sentence only at top */}} {{ with .Params.summary }}

{{ . | markdownify }}

{{ else }} - {{/* Fallback to first line of description if no summary */}} + {{/* Extract first sentence from description (or full text if no sentence ending) */}} {{ with .Description }} -

{{ . | truncate 200 | markdownify }}

+ {{ $matches := findRE `^[^.!?]*[.!?]` . 1 }} + {{ if gt (len $matches) 0 }} + {{ $firstSentence := index $matches 0 }} +

{{ $firstSentence | markdownify }}

+ {{ else }} + {{/* No sentence ending found - use first line or full description */}} + {{ $firstLine := index (split . "\n") 0 }} +

{{ $firstLine | markdownify }}

+ {{ end }} {{ end }} {{ end }}
@@ -90,6 +98,14 @@

Endpoints

{{ end }} + {{/* Overview - full tag description after endpoints */}} + {{ with .Description }} +
+

Overview

+ {{ . | markdownify }} +
+ {{ end }} + {{/* Hugo page content if any */}} {{ with .Content }}
From 1e622be0b5d5974b1124241a8321a56de24c82a4 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:36:50 -0600 Subject: [PATCH 15/51] fix(api): Show operations in sidebar nav, fix CSS for menu and summary - Sidebar nav now shows operations with method badges and paths instead of duplicating tag names when groups are expanded - Added background color to nav group items to match sidebar - Increased max-height for expanded groups to accommodate all operations - Added styles for operation items in sidebar nav (.api-nav-operation) - Fixed summary paragraph width by setting flex-basis to 100% --- assets/styles/layouts/_api-layout.scss | 50 +++++++++++++++++++++++--- layouts/partials/api/sidebar-nav.html | 25 ++++++++++--- 2 files changed, 67 insertions(+), 8 deletions(-) diff --git a/assets/styles/layouts/_api-layout.scss b/assets/styles/layouts/_api-layout.scss index bd44ba1ca2..310a4282ef 100644 --- a/assets/styles/layouts/_api-layout.scss +++ b/assets/styles/layouts/_api-layout.scss @@ -270,9 +270,10 @@ max-height: 0; overflow: hidden; transition: max-height 0.3s ease-out; + background: $body-bg; // Match sidebar background &.is-open { - max-height: 1000px; // Large enough to show all items + max-height: 2000px; // Large enough to show all operations } } } @@ -301,7 +302,7 @@ font-weight: $medium; } - // HTTP method badge + // HTTP method badge (legacy class) .method-badge { display: inline-block; font-size: 0.65rem; @@ -319,6 +320,41 @@ &.patch { background-color: $y-thunder; color: rgba($g5-pepper, 0.75); } &.delete { background-color: $r-curacao; color: #fff; } } + + // Operation items with method badges and paths + &.api-nav-operation { + a { + display: flex; + align-items: center; + gap: 0.4rem; + font-size: 0.85rem; + } + + .api-method { + display: inline-block; + font-size: 0.55rem; + font-weight: $bold; + text-transform: uppercase; + padding: 0.15rem 0.3rem; + border-radius: 3px; + min-width: 2rem; + text-align: center; + flex-shrink: 0; + + 
&--get { background-color: $gr-rainforest; color: #fff; } + &--post { background-color: $b-ocean; color: #fff; } + &--put { background-color: $br-galaxy; color: #fff; } + &--patch { background-color: $y-thunder; color: rgba($g5-pepper, 0.75); } + &--delete { background-color: $r-curacao; color: #fff; } + } + + .api-path { + font-family: $code; + font-size: 0.75rem; + word-break: break-all; + color: inherit; + } + } } } @@ -336,8 +372,14 @@ } .article--header-text { - flex: 1; - min-width: 200px; + flex: 1 1 100%; // Take full width, allowing download button to wrap + min-width: 0; +} + +// Summary paragraph in header - ensure full width +.article--summary { + max-width: none; + width: 100%; } // Download OpenAPI spec button diff --git a/layouts/partials/api/sidebar-nav.html b/layouts/partials/api/sidebar-nav.html index 0acb89d756..31fb525af3 100644 --- a/layouts/partials/api/sidebar-nav.html +++ b/layouts/partials/api/sidebar-nav.html @@ -105,19 +105,36 @@

API Reference

{{ if $article }} {{ $path := index $article "path" }} {{ $fields := index $article "fields" }} - {{ $menuName := $tagName }} - {{ if and (reflect.IsMap $fields) (isset $fields "menuName") }} - {{ $menuName = index $fields "menuName" }} - {{ end }} {{ $isConceptual := false }} {{ if and (reflect.IsMap $fields) (isset $fields "isConceptual") }} {{ $isConceptual = index $fields "isConceptual" }} {{ end }} + {{/* Show operations from article data */}} + {{ $operations := slice }} + {{ if and (reflect.IsMap $fields) (isset $fields "operations") }} + {{ $operations = index $fields "operations" }} + {{ end }} + {{ if gt (len $operations) 0 }} + {{ range $operations }} +
  • + + {{ upper .method }} + {{ .path }} + +
  • + {{ end }} + {{ else }} + {{/* Fallback for conceptual pages or pages without operations */}} + {{ $menuName := $tagName }} + {{ if and (reflect.IsMap $fields) (isset $fields "menuName") }} + {{ $menuName = index $fields "menuName" }} + {{ end }} + {{ end }} {{ end }} {{ end }} From aee0155bf082875a5db1ef60f7014af5e4fb1dfc Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 14:38:54 -0600 Subject: [PATCH 16/51] fix(api): Match sidebar nav font sizes for operation items - Changed operation link font-size from 0.85rem to 0.95rem to match sidebar - Changed path code font-size from 0.75rem to 0.85rem for consistency - Adjusted method badge font-size from 0.55rem to 0.6rem for readability --- assets/styles/layouts/_api-layout.scss | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/assets/styles/layouts/_api-layout.scss b/assets/styles/layouts/_api-layout.scss index 310a4282ef..3c3c11ce94 100644 --- a/assets/styles/layouts/_api-layout.scss +++ b/assets/styles/layouts/_api-layout.scss @@ -327,17 +327,17 @@ display: flex; align-items: center; gap: 0.4rem; - font-size: 0.85rem; + font-size: 0.95rem; // Match sidebar nav font size } .api-method { display: inline-block; - font-size: 0.55rem; + font-size: 0.6rem; font-weight: $bold; text-transform: uppercase; padding: 0.15rem 0.3rem; border-radius: 3px; - min-width: 2rem; + min-width: 2.2rem; text-align: center; flex-shrink: 0; @@ -350,7 +350,7 @@ .api-path { font-family: $code; - font-size: 0.75rem; + font-size: 0.85rem; // Match sidebar nav font size word-break: break-all; color: inherit; } From b1fcaa1443b3a0fa859175820e25f52c4aa19275 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Mon, 8 Dec 2025 15:37:13 -0600 Subject: [PATCH 17/51] fix(api): Reset button styles for dark mode, increase operation font size - Add button reset styles to .api-nav-group-header for dark mode compatibility (background: none, border: none, width: 100%) - Increase operation link font-size from 
0.95rem to 1rem to match sidebar nav-item font size (18px base) - Increase api-path code font-size from 0.85rem to 0.9rem --- assets/styles/layouts/_api-layout.scss | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/assets/styles/layouts/_api-layout.scss b/assets/styles/layouts/_api-layout.scss index 3c3c11ce94..c4993f4b6e 100644 --- a/assets/styles/layouts/_api-layout.scss +++ b/assets/styles/layouts/_api-layout.scss @@ -240,6 +240,13 @@ color: $nav-category; cursor: pointer; transition: color 0.2s; + // Button reset for dark mode compatibility + background: none; + border: none; + width: 100%; + text-align: left; + font-size: inherit; + font-family: inherit; &:hover { color: $nav-category-hover; @@ -327,7 +334,7 @@ display: flex; align-items: center; gap: 0.4rem; - font-size: 0.95rem; // Match sidebar nav font size + font-size: 1rem; // Match sidebar nav-item font size (18px base) } .api-method { @@ -350,7 +357,7 @@ .api-path { font-family: $code; - font-size: 0.85rem; // Match sidebar nav font size + font-size: 0.9rem; // Slightly smaller than link text for hierarchy word-break: break-all; color: inherit; } From 6e38e24335a0a9bf5e6994bebf5f756bb43a0a42 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Tue, 9 Dec 2025 16:05:53 -0600 Subject: [PATCH 18/51] feat(api): Integrate API navigation into Hugo menu system Add API nav items as children of "InfluxDB HTTP API" menu item: - New api-menu-items.html partial generates nav from data/articles.yml - Modified nested-menu.html to inject API nav for API parent menu item - Updated api_nav_groups.yml to add url for Administration group - Created Administration landing pages for Core and Enterprise - Updated .gitignore to allow hand-crafted API conceptual pages The Administration page uses layout: list and isConceptual: true to render content directly without RapiDoc wrapper. 
--- .gitignore | 4 + .../core/api/administration/_index.md | 26 +++ .../enterprise/api/administration/_index.md | 26 +++ data/api_nav_groups.yml | 7 + layouts/partials/sidebar/api-menu-items.html | 216 ++++++++++++++++++ layouts/partials/sidebar/nested-menu.html | 29 ++- 6 files changed, 302 insertions(+), 6 deletions(-) create mode 100644 content/influxdb3/core/api/administration/_index.md create mode 100644 content/influxdb3/enterprise/api/administration/_index.md create mode 100644 layouts/partials/sidebar/api-menu-items.html diff --git a/.gitignore b/.gitignore index 8688142320..7a973e3116 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,10 @@ package-lock.json /content/influxdb3/*/reference/api/** /static/openapi +# Exception: hand-crafted API conceptual pages (not generated) +!/content/influxdb3/*/api/administration/ +!/content/influxdb3/*/api/administration/_index.md + /helper-scripts/output/* /telegraf-build !telegraf-build/templates diff --git a/content/influxdb3/core/api/administration/_index.md b/content/influxdb3/core/api/administration/_index.md new file mode 100644 index 0000000000..ba791c152a --- /dev/null +++ b/content/influxdb3/core/api/administration/_index.md @@ -0,0 +1,26 @@ +--- +title: Administration +description: Endpoints for managing databases, tables, and tokens. +type: api +layout: list +weight: 105 +isConceptual: true +--- + +Use the Administration API to manage InfluxDB resources: + + diff --git a/content/influxdb3/enterprise/api/administration/_index.md b/content/influxdb3/enterprise/api/administration/_index.md new file mode 100644 index 0000000000..833443bd44 --- /dev/null +++ b/content/influxdb3/enterprise/api/administration/_index.md @@ -0,0 +1,26 @@ +--- +title: Administration +description: Endpoints for managing databases, tables, and tokens. 
+type: api +layout: list +weight: 105 +isConceptual: true +--- + +Use the Administration API to manage InfluxDB resources: + + diff --git a/data/api_nav_groups.yml b/data/api_nav_groups.yml index 9598e548cc..637284c7eb 100644 --- a/data/api_nav_groups.yml +++ b/data/api_nav_groups.yml @@ -1,6 +1,12 @@ # API Navigation Groups # Defines sidebar navigation structure for API reference documentation # Tags are grouped by function/task for better UX +# +# Group fields: +# name: Display name in the navigation +# weight: Sort order (lower = higher) +# tags: List of tag names that belong to this group +# url: (optional) URL path suffix for the group page (relative to api/) groups: - name: Concepts @@ -27,6 +33,7 @@ groups: - name: Administration weight: 5 + url: administration tags: - Database - Table diff --git a/layouts/partials/sidebar/api-menu-items.html b/layouts/partials/sidebar/api-menu-items.html new file mode 100644 index 0000000000..2850e26560 --- /dev/null +++ b/layouts/partials/sidebar/api-menu-items.html @@ -0,0 +1,216 @@ +{{/* + API Reference Menu Items for Hugo Navigation + + Generates + + {{ else }} + {{/* Multi-tag group: group label (or link if url defined) with tag pages as children */}} + {{ $groupUrl := "" }} + {{ $groupIsActive := false }} + {{ with $group.url }} + {{ $groupUrl = print "/" $product "/" $version "/api/" . "/" | relURL }} + {{ $groupIsActive = eq $currentPage.RelPermalink (print "/" $product "/" $version "/api/" . 
"/") }} + {{ end }} + + {{ end }} + {{ end }} + {{ end }} +{{ end }} diff --git a/layouts/partials/sidebar/nested-menu.html b/layouts/partials/sidebar/nested-menu.html index 67cf9a1e00..12dd9eec14 100644 --- a/layouts/partials/sidebar/nested-menu.html +++ b/layouts/partials/sidebar/nested-menu.html @@ -1,22 +1,39 @@ {{ $page := .page }} {{ $menu := .menu }} +{{ $siteData := .siteData }} {{ define "recursiveMenu" }} {{ $menuContext := .menu }} {{ $currentPage := .currentPage }} + {{ $site := .site }} + {{ $siteData := .siteData }} {{ $depth := add .depth 1 }} {{ $navClass := cond (gt $depth 1) "item" "category" }} {{ range $menuContext }} + {{/* Check if this is the InfluxDB HTTP API menu item for InfluxDB 3 products */}} + {{ $isApiParent := and (eq .Name "InfluxDB HTTP API") (or (hasPrefix .URL "/influxdb3/") (hasPrefix .URL "/influxdb/")) }} + {{ end }} @@ -199,7 +205,13 @@ {{ end }} From f6de23b5ec824890e23aa2876da26f8593a4d0e4 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Thu, 11 Dec 2025 17:25:38 -0600 Subject: [PATCH 22/51] feat(api): Add RapiDoc Mini component for API operation pages The feature is shippable, but needs a few small fixes and we'll need to update or alias all API docs links for Core and Ent3. 
- Add rapidoc-mini.ts TypeScript component with CDN loading and theme sync - Add api-operation layout for standalone operation pages - Add rapidoc-mini.html partial for reusable RapiDoc rendering - Add rapidoc-custom.css for RapiDoc style overrides - Register rapidoc-mini component in main.js - Add article data for cloud-dedicated and clustered products - Update API reference Cypress tests --- api-docs/influxdb3/core/v3/ref.yml | 40 +- api-docs/influxdb3/enterprise/v3/ref.yml | 173 +-------- .../scripts/dist/generate-openapi-articles.js | 128 ++++++- .../dist/openapi-paths-to-hugo-data/index.js | 16 + api-docs/scripts/generate-openapi-articles.ts | 176 ++++++++- .../openapi-paths-to-hugo-data/index.ts | 20 + assets/js/components/rapidoc-mini.ts | 334 +++++++++++++++++ assets/js/main.js | 2 + assets/styles/layouts/_api-layout.scss | 53 ++- assets/styles/layouts/_sidebar.scss | 15 +- cypress/e2e/content/api-reference.cy.js | 156 ++++++++ .../influxdb/cloud-dedicated/articles.json | 30 ++ .../influxdb/clustered/articles.json | 66 ++++ .../influxdb/influxdb3_core/articles.json | 170 +++------ .../influxdb/influxdb3_core/articles.yml | 295 +++++---------- .../influxdb3_enterprise/articles.json | 194 +++------- .../influxdb3_enterprise/articles.yml | 354 +++++------------- layouts/api-operation/operation.html | 73 ++++ layouts/api/list.html | 18 +- layouts/partials/api/rapidoc-mini.html | 135 +++++++ layouts/partials/api/rapidoc.html | 150 +++++++- layouts/partials/sidebar/api-menu-items.html | 56 ++- static/css/rapidoc-custom.css | 18 + 23 files changed, 1693 insertions(+), 979 deletions(-) create mode 100644 assets/js/components/rapidoc-mini.ts create mode 100644 data/article_data/influxdb/cloud-dedicated/articles.json create mode 100644 data/article_data/influxdb/clustered/articles.json create mode 100644 layouts/api-operation/operation.html create mode 100644 layouts/partials/api/rapidoc-mini.html create mode 100644 static/css/rapidoc-custom.css diff --git 
a/api-docs/influxdb3/core/v3/ref.yml b/api-docs/influxdb3/core/v3/ref.yml index ca8b8d55cf..a8d7c54699 100644 --- a/api-docs/influxdb3/core/v3/ref.yml +++ b/api-docs/influxdb3/core/v3/ref.yml @@ -90,33 +90,20 @@ tags: - [Manage the Distinct Value Cache](/influxdb3/core/admin/distinct-value-cache/) - [Manage the Last Value Cache](/influxdb3/core/admin/last-value-cache/) - - name: Compatibility endpoints + - name: Migrate from InfluxDB v1 or v2 + x-traitTag: true description: | - InfluxDB 3 provides compatibility endpoints for InfluxDB 1.x and InfluxDB 2.x workloads and clients. - - ### Write data using v1- or v2-compatible endpoints - - - [`/api/v2/write` endpoint](#operation/PostV2Write) - for InfluxDB v2 clients and when you bring existing InfluxDB v2 write workloads to InfluxDB 3. - - [`/write` endpoint](#operation/PostV1Write) for InfluxDB v1 clients and when you bring existing InfluxDB v1 write workloads to InfluxDB 3. - - For new workloads, use the [`/api/v3/write_lp` endpoint](#operation/PostWriteLP). - - All endpoints accept the same line protocol format. - - ### Query data - - Use the HTTP [`/query`](#operation/GetV1ExecuteQuery) endpoint for InfluxDB v1 clients and v1 query workloads using InfluxQL. - - For new workloads, use one of the following: + Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. - - HTTP [`/api/v3/query_sql` endpoint](#operation/GetExecuteQuerySQL) for new query workloads using SQL. - - HTTP [`/api/v3/query_influxql` endpoint](#operation/GetExecuteInfluxQLQuery) for new query workloads using InfluxQL. - - Flight SQL and InfluxDB 3 _Flight+gRPC_ APIs for querying with SQL or InfluxQL. For more information about using Flight APIs, see [InfluxDB 3 client libraries](https://github.com/InfluxCommunity?q=influxdb3&type=public&language=&sort=). + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. 
+ Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version. - ### Server information + ### Migration guides - Server information endpoints such as `/health` and `metrics` are compatible with InfluxDB 1.x and InfluxDB 2.x clients. + - [Migrate from InfluxDB v1](/influxdb3/core/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x + - [Migrate from InfluxDB v2](/influxdb3/core/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud + - [Use compatibility APIs to write data](/influxdb3/core/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints + - [Use the v1 HTTP query API](/influxdb3/core/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP - name: Database description: Manage databases - description: | @@ -340,7 +327,6 @@ paths: '413': description: Request entity too large. tags: - - Compatibility endpoints - Write data x-influxdata-guides: - title: Use compatibility APIs to write data @@ -429,7 +415,6 @@ paths: '413': description: Request entity too large. tags: - - Compatibility endpoints - Write data x-influxdata-guides: - title: Use compatibility APIs to write data @@ -852,7 +837,6 @@ paths: description: Unprocessable entity. tags: - Query data - - Compatibility endpoints x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data href: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ @@ -970,7 +954,6 @@ paths: description: Unprocessable entity. tags: - Query data - - Compatibility endpoints x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data href: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ @@ -999,7 +982,6 @@ paths: description: Service is unavailable. 
tags: - Server information - - Compatibility endpoints /ping: get: operationId: GetPing @@ -2693,10 +2675,10 @@ x-tagGroups: tags: - Quick start - Authentication + - Migrate from InfluxDB v1 or v2 - Cache data - Common parameters - Response codes - - Compatibility endpoints - Database - Processing engine - Server information diff --git a/api-docs/influxdb3/enterprise/v3/ref.yml b/api-docs/influxdb3/enterprise/v3/ref.yml index 1116a91d4e..b68a86a557 100644 --- a/api-docs/influxdb3/enterprise/v3/ref.yml +++ b/api-docs/influxdb3/enterprise/v3/ref.yml @@ -5,18 +5,15 @@ info: The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for interacting with InfluxDB 3 Enterprise databases and resources. Use this API to: - - Write data to InfluxDB 3 Enterprise databases - Query data using SQL or InfluxQL - Process data using Processing engine plugins - Manage databases, tables, and Processing engine triggers - Perform administrative tasks and access system information - The API includes endpoints under the following paths: - `/api/v3`: InfluxDB 3 Enterprise native endpoints - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients - + + # Override servers[0].description + - target: $.servers[0].description + update: InfluxDB 3 Core API URL + + # Override servers[0].variables.baseurl.description + - target: $.servers[0].variables.baseurl.description + update: InfluxDB 3 Core URL diff --git a/api-docs/influxdb3/core/v3/ref.yml b/api-docs/influxdb3/core/v3/ref.yml index 911e207ced..3010cd9b29 100644 --- a/api-docs/influxdb3/core/v3/ref.yml +++ b/api-docs/influxdb3/core/v3/ref.yml @@ -21,7 +21,7 @@ info: This documentation is generated from the [InfluxDB OpenAPI specification](https://raw.githubusercontent.com/influxdata/). 
--> - version: '3.7.0' + version: '' license: name: MIT url: https://opensource.org/licenses/MIT @@ -41,6 +41,8 @@ servers: security: - BearerAuthentication: [] tags: + - name: Auth token + description: Manage tokens for authentication and authorization - name: Authentication description: | Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: @@ -85,22 +87,8 @@ tags: #### Related guides - - [Manage the Distinct Value Cache](/influxdb3/core/admin/distinct-value-cache/) - - [Manage the Last Value Cache](/influxdb3/core/admin/last-value-cache/) - - name: Migrate from InfluxDB v1 or v2 - x-traitTag: true - description: | - Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. - - InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. - Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version. - - ### Migration guides - - - [Migrate from InfluxDB v1](/influxdb3/core/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x - - [Migrate from InfluxDB v2](/influxdb3/core/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud - - [Use compatibility APIs to write data](/influxdb3/core/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints - - [Use the v1 HTTP query API](/influxdb3/core/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP + - [Manage the Distinct Value Cache](/influxdb/version/admin/distinct-value-cache/) + - [Manage the Last Value Cache](/influxdb/version/admin/last-value-cache/) - name: Database description: Manage databases - description: | @@ -129,14 +117,28 @@ tags: | `Content-Type` | string | The format of the data in the request body. | name: Headers and parameters x-traitTag: true + - name: Migrate from InfluxDB v1 or v2 + x-traitTag: true + description: | + Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. 
+ + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. + Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version. + + ### Migration guides + + - [Migrate from InfluxDB v1](/influxdb/version/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x + - [Migrate from InfluxDB v2](/influxdb/version/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud + - [Use compatibility APIs to write data](/influxdb/version/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints + - [Use the v1 HTTP query API](/influxdb/version/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP - name: Processing engine description: | Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins. - InfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database. + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database. Use Processing engine plugins and triggers to run code and perform tasks for different database events. - To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/core/processing-engine/) guide. + To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb/version/processing-engine/) guide. - name: Query data description: Query data using SQL or InfluxQL - name: Quick start @@ -180,14 +182,12 @@ tags: {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Core, see the [Get started](/influxdb3/core/get-started/) guide. + For more information about using InfluxDB 3, see the [Get started](/influxdb/version/get-started/) guide. 
x-traitTag: true - name: Server information description: Retrieve server metrics, status, and version information - name: Table description: Manage table schemas and data - - name: Auth token - description: Manage tokens for authentication and authorization - name: Write data description: | Write data to InfluxDB 3 using line protocol format. @@ -217,10 +217,10 @@ paths: x-compatibility-version: v1 description: | Writes line protocol to the specified database. - Use this endpoint to send data in [line protocol](/influxdb3/core/reference/syntax/line-protocol/) format to InfluxDB. + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/core/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ parameters: - $ref: '#/components/parameters/dbWriteParam' - $ref: '#/components/parameters/compatibilityPrecisionParam' @@ -287,7 +287,7 @@ paths: The response body: - indicates if a partial write occurred or all data was rejected. - - contains details about the [rejected points](/influxdb3/core/write-data/troubleshoot/#troubleshoot-rejected-points), up to 100 points. + - contains details about the [rejected points](/influxdb/version/write-data/troubleshoot/#troubleshoot-rejected-points), up to 100 points. content: application/json: examples: @@ -332,7 +332,7 @@ paths: - Write data x-influxdata-guides: - title: Use compatibility APIs to write data - href: /influxdb3/core/write-data/http-api/compatibility-apis/ + href: /influxdb/version/write-data/http-api/compatibility-apis/ /api/v2/write: post: operationId: PostV2Write @@ -340,10 +340,10 @@ paths: x-compatibility-version: v2 description: | Writes line protocol to the specified database. - Use this endpoint to send data in [line protocol](/influxdb3/core/reference/syntax/line-protocol/) format to InfluxDB. 
+ Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/core/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ parameters: - name: Content-Type in: header @@ -423,7 +423,7 @@ paths: - Write data x-influxdata-guides: - title: Use compatibility APIs to write data - href: /influxdb3/core/write-data/http-api/compatibility-apis/ + href: /influxdb/version/write-data/http-api/compatibility-apis/ /api/v3/write_lp: post: operationId: PostWriteLP @@ -431,10 +431,10 @@ paths: description: | Writes line protocol to the specified database. - This is the native InfluxDB 3 Core write endpoint that provides enhanced control + This is the native InfluxDB 3 write endpoint that provides enhanced control over write behavior with advanced parameters for high-performance and fault-tolerant operations. - Use this endpoint to send data in [line protocol](/influxdb3/core/reference/syntax/line-protocol/) format to InfluxDB. + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. Use query parameters to specify options for writing data. #### Features @@ -455,7 +455,7 @@ paths: #### Related - - [Use the InfluxDB v3 write_lp API to write data](/influxdb3/core/write-data/http-api/v3-write-lp/) + - [Use the InfluxDB v3 write_lp API to write data](/influxdb/version/write-data/http-api/v3-write-lp/) parameters: - $ref: '#/components/parameters/dbWriteParam' - $ref: '#/components/parameters/accept_partial' @@ -724,7 +724,7 @@ paths: Compatible with InfluxDB 1.x client libraries and third-party integrations such as Grafana. 
externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ parameters: - name: Accept in: header @@ -778,8 +778,8 @@ paths: type: string - name: epoch description: | - Formats timestamps as [unix (epoch) timestamps](/influxdb3/core/reference/glossary/#unix-timestamp) with the specified precision - instead of [RFC3339 timestamps](/influxdb3/core/reference/glossary/#rfc3339-timestamp) with nanosecond precision. + Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. in: query schema: $ref: '#/components/schemas/EpochCompatibility' @@ -849,7 +849,7 @@ paths: - Query data x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - href: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ post: operationId: PostExecuteV1Query summary: Execute InfluxQL query (v1-compatible) @@ -857,7 +857,7 @@ paths: description: Executes an InfluxQL query to retrieve data from the specified database. externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ requestBody: content: application/json: @@ -891,8 +891,8 @@ paths: - `u` or `µ` for microseconds - `ns` for nanoseconds - Formats timestamps as [unix (epoch) timestamps](/influxdb3/core/reference/glossary/#unix-timestamp) with the specified precision - instead of [RFC3339 timestamps](/influxdb3/core/reference/glossary/#rfc3339-timestamp) with nanosecond precision. 
+ Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. enum: - ns - u @@ -971,7 +971,7 @@ paths: - Query data x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - href: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ /health: get: operationId: GetHealth @@ -1291,7 +1291,7 @@ paths: ### Related guides - - [Processing engine and Python plugins](/influxdb3/core/plugins/) + - [Processing engine and Python plugins](/influxdb/version/plugins/) requestBody: required: true content: @@ -1507,7 +1507,7 @@ paths: ### Related guides - - [Processing engine and Python plugins](/influxdb3/core/plugins/) + - [Processing engine and Python plugins](/influxdb/version/plugins/) parameters: - $ref: '#/components/parameters/ContentType' requestBody: @@ -1557,7 +1557,7 @@ paths: ### Related - - [Processing engine and Python plugins](/influxdb3/core/plugins/) + - [Processing engine and Python plugins](/influxdb/version/plugins/) - [Python requirements file format](https://pip.pypa.io/en/stable/reference/requirements-file-format/) parameters: - $ref: '#/components/parameters/ContentType' @@ -1723,7 +1723,7 @@ paths: $ref: '#/components/responses/Unauthorized' security: - BearerAuthentication: [] - - {} # No auth required for initial token creation + - {} tags: - Auth token /api/v3/configure/token/admin/regenerate: @@ -2060,8 +2060,8 @@ components: #### Related - - [Use the HTTP API and client libraries to write data](/influxdb3/core/write-data/api-client-libraries/) - - [Data durability](/influxdb3/core/reference/internals/durability/) + - [Use the HTTP API and client libraries to write data](/influxdb/version/write-data/api-client-libraries/) + - [Data 
durability](/influxdb/version/reference/internals/durability/) PrecisionWriteCompatibility: enum: - ms @@ -2493,6 +2493,36 @@ components: description: The current status of the license. example: active description: Response schema for license information. + ResourceTokenObject: + type: object + properties: + token_name: + type: string + permissions: + type: array + items: + type: object + properties: + resource_type: + type: string + enum: + - system + - db + resource_identifier: + type: array + items: + type: string + actions: + type: array + items: + type: string + enum: + - read + - write + expiry_secs: + type: integer + description: The expiration time in seconds. + description: Response schema for resource token creation. responses: Unauthorized: description: Unauthorized access. @@ -2558,13 +2588,13 @@ components: Replace the following: - - **`DATABASE_NAME`**: your InfluxDB 3 Core database + - **`DATABASE_NAME`**: your InfluxDB 3 database - **`AUTH_TOKEN`**: an admin token #### Related guides - - [Authenticate v1 API requests](/influxdb3/core/guides/api-compatibility/v1/) - - [Manage tokens](/influxdb3/core/admin/tokens/) + - [Authenticate v1 API requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) QuerystringAuthentication: type: apiKey in: query @@ -2594,7 +2624,7 @@ components: Replace the following: - - **`DATABASE_NAME`**: your InfluxDB 3 Core database + - **`DATABASE_NAME`**: your InfluxDB 3 database - **`AUTH_TOKEN`**: an admin token ```bash @@ -2615,12 +2645,12 @@ components: Replace the following: - **`DATABASE_NAME`**: the database to query - - **`AUTH_TOKEN`**: an [admin token](/influxdb3/core/admin/tokens/) + - **`AUTH_TOKEN`**: an [admin token](/influxdb/version/admin/tokens/) #### Related guides - - [Authenticate v1 API requests](/influxdb3/core/guides/api-compatibility/v1/) - - [Manage tokens](/influxdb3/core/admin/tokens/) + - [Authenticate v1 API 
requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) BearerAuthentication: type: http scheme: bearer @@ -2679,7 +2709,23 @@ components: ### Related guides - - [Manage tokens](/influxdb3/core/admin/tokens/) + - [Manage tokens](/influxdb/version/admin/tokens/) in: header name: Authorization type: apiKey +x-tagGroups: + - name: Using the InfluxDB HTTP API + tags: + - Quick start + - Authentication + - Cache data + - Common parameters + - Response codes + - Compatibility endpoints + - Database + - Processing engine + - Server information + - Table + - Token + - Query data + - Write data diff --git a/api-docs/influxdb3/enterprise/v3/overlay.yml b/api-docs/influxdb3/enterprise/v3/overlay.yml new file mode 100644 index 0000000000..86e498a73f --- /dev/null +++ b/api-docs/influxdb3/enterprise/v3/overlay.yml @@ -0,0 +1,142 @@ +overlay: 1.0.0 +info: + title: Enterprise product overlay + version: 1.0.0 + +actions: + # Override info.title + - target: $.info.title + update: InfluxDB 3 Enterprise API Service + + # Override info.description + - target: $.info.description + update: | + The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for + interacting with InfluxDB 3 Enterprise databases and resources. 
+ Use this API to: + - Write data to InfluxDB 3 Enterprise databases + - Query data using SQL or InfluxQL + - Process data using Processing engine plugins + - Manage databases, tables, and Processing engine triggers + - Perform administrative tasks and access system information + The API includes endpoints under the following paths: + - `/api/v3`: InfluxDB 3 Enterprise native endpoints + - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients + - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients + + + # Override servers[0].description + - target: $.servers[0].description + update: InfluxDB 3 Enterprise API URL + + # Override servers[0].variables.baseurl.description + - target: $.servers[0].variables.baseurl.description + update: InfluxDB 3 Enterprise URL + + # Enterprise-only: Add PATCH method to /api/v3/configure/table + - target: $.paths['/api/v3/configure/table'].patch + update: + operationId: PatchConfigureTable + summary: Update a table + description: | + Updates table configuration, such as retention period. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateTableRequest' + responses: + '200': + description: Success. The table has been updated. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Table not found. + tags: + - Table + + # Enterprise-only: Add /api/v3/configure/database/{db} path + - target: $.paths['/api/v3/configure/database/{db}'] + update: + patch: + operationId: PatchConfigureDatabase + summary: Update a database + description: | + Updates database configuration, such as retention period. + parameters: + - name: db + in: path + required: true + schema: + type: string + description: The name of the database to update. 
+ requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateDatabaseRequest' + responses: + '200': + description: Success. The database has been updated. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Database + + # Enterprise-only: Add /api/v3/show/license path + - target: $.paths['/api/v3/show/license'] + update: + get: + operationId: GetShowLicense + summary: Show license information + description: | + Retrieves information about the current InfluxDB 3 Enterprise license. + responses: + '200': + description: Success. The response body contains license information. + content: + application/json: + schema: + $ref: '#/components/schemas/LicenseResponse' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + tags: + - Server information + + # Enterprise-only: Add /api/v3/configure/enterprise/token path + - target: $.paths['/api/v3/configure/enterprise/token'] + update: + post: + operationId: PostCreateResourceToken + summary: Create a resource token + description: | + Creates a resource (fine-grained permissions) token. + A resource token is a token that has access to specific resources in the system. + This endpoint is only available in InfluxDB 3 Enterprise. + responses: + '201': + description: | + Success. The resource token has been created. + The response body contains the token string and metadata. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ResourceTokenObject' + '401': + $ref: '#/components/responses/Unauthorized' + tags: + - Authentication + - Auth token diff --git a/api-docs/influxdb3/enterprise/v3/ref.yml b/api-docs/influxdb3/enterprise/v3/ref.yml index bdfa3cfea5..b7bf7c2c02 100644 --- a/api-docs/influxdb3/enterprise/v3/ref.yml +++ b/api-docs/influxdb3/enterprise/v3/ref.yml @@ -5,20 +5,23 @@ info: The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for interacting with InfluxDB 3 Enterprise databases and resources. Use this API to: + - Write data to InfluxDB 3 Enterprise databases - Query data using SQL or InfluxQL - Process data using Processing engine plugins - Manage databases, tables, and Processing engine triggers - Perform administrative tasks and access system information + The API includes endpoints under the following paths: - `/api/v3`: InfluxDB 3 Enterprise native endpoints - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients + - version: '3.7.0' + version: '' license: name: MIT url: https://opensource.org/licenses/MIT @@ -38,68 +41,74 @@ servers: security: - BearerAuthentication: [] tags: + - name: Auth token + description: Manage tokens for authentication and authorization - name: Authentication description: | Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: + | Authentication scheme | Works with | |:----------------------|:-----------| | Bearer authentication | All endpoints | | Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) | | Basic authentication | v1 compatibility endpoints (`/write`, `/query`) | | Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) | + See the **Security Schemes** section below for details on each authentication method. 
x-traitTag: true - name: Cache data description: | Manage the in-memory cache. + #### Distinct Value Cache + The Distinct Value Cache (DVC) lets you cache distinct values of one or more columns in a table, improving the performance of queries that return distinct tag and field values. + The DVC is an in-memory cache that stores distinct values for specific columns in a table. When you create an DVC, you can specify what columns' distinct values to cache, the maximum number of distinct value combinations to cache, and the maximum age of cached values. A DVC is associated with a table, which can have multiple DVCs. + #### Last value cache + The Last Value Cache (LVC) lets you cache the most recent values for specific fields in a table, improving the performance of queries that return the most recent value of a field for specific series or the last N values of a field. + The LVC is an in-memory cache that stores the last N number of values for specific fields of series in a table. When you create an LVC, you can specify what fields to cache, what tags to use to identify each series, and the number of values to cache for each unique series. An LVC is associated with a table, which can have multiple LVCs. + #### Related guides - - [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/) - - [Manage the Last Value Cache](/influxdb3/enterprise/admin/last-value-cache/) - - name: Migrate from InfluxDB v1 or v2 - x-traitTag: true - description: | - Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. - InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. - Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version. 
- ### Migration guides - - [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x - - [Migrate from InfluxDB v2](/influxdb3/enterprise/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud - - [Use compatibility APIs to write data](/influxdb3/enterprise/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints - - [Use the v1 HTTP query API](/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP + + - [Manage the Distinct Value Cache](/influxdb/version/admin/distinct-value-cache/) + - [Manage the Last Value Cache](/influxdb/version/admin/last-value-cache/) - name: Database description: Manage databases - description: | Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use. + ### Common parameters + The following table shows common parameters used by many InfluxDB API endpoints. Many endpoints may require other parameters in the query string or in the request body that perform functions specific to those endpoints. + | Query parameter | Value type | Description | |:------------------------ |:--------------------- |:-------------------------------------------| | `db` | string | The database name | + InfluxDB HTTP API endpoints use standard HTTP request and response headers. The following table shows common headers used by many InfluxDB API endpoints. Some endpoints may use other headers that perform functions more specific to those endpoints--for example, the write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body. + | Header | Value type | Description | |:------------------------ |:--------------------- |:-------------------------------------------| | `Accept` | string | The content type that the client can understand. 
| @@ -108,34 +117,57 @@ tags: | `Content-Type` | string | The format of the data in the request body. | name: Headers and parameters x-traitTag: true + - name: Migrate from InfluxDB v1 or v2 + x-traitTag: true + description: | + Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. + + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. + Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version. + + ### Migration guides + + - [Migrate from InfluxDB v1](/influxdb/version/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x + - [Migrate from InfluxDB v2](/influxdb/version/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud + - [Use compatibility APIs to write data](/influxdb/version/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints + - [Use the v1 HTTP query API](/influxdb/version/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP - name: Processing engine description: | Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins. - InfluxDB 3 Enterprise provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database. + + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database. Use Processing engine plugins and triggers to run code and perform tasks for different database events. - To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide. + + To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb/version/processing-engine/) guide. 
- name: Query data description: Query data using SQL or InfluxQL - name: Quick start description: | 1. [Create an admin token](#section/Authentication) to authorize API requests. + ```bash curl -X POST "http://localhost:8181/api/v3/configure/token/admin" ``` 2. [Check the status](#section/Server-information) of the InfluxDB server. + ```bash curl "http://localhost:8181/health" \ --header "Authorization: Bearer ADMIN_TOKEN" ``` + 3. [Write data](#operation/PostWriteLP) to InfluxDB. + ```bash curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" --header "Authorization: Bearer ADMIN_TOKEN" \ --data-raw "home,room=Kitchen temp=72.0 home,room=Living\ room temp=71.5" ``` + If all data is written, the response is `204 No Content`. + 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB. + ```bash curl -G "http://localhost:8181/api/v3/query_sql" \ --header "Authorization: Bearer ADMIN_TOKEN" \ @@ -145,24 +177,26 @@ tags: ``` Output: + ```jsonl {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Enterprise, see the [Get started](/influxdb3/enterprise/get-started/) guide. + For more information about using InfluxDB 3, see the [Get started](/influxdb/version/get-started/) guide. x-traitTag: true - name: Server information description: Retrieve server metrics, status, and version information - name: Table description: Manage table schemas and data - - name: Auth token - description: Manage tokens for authentication and authorization - name: Write data description: | Write data to InfluxDB 3 using line protocol format. + #### Timestamp precision across write APIs + InfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions. 
The following table compares timestamp precision support across v1, v2, and v3 write APIs: + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) | |-----------|---------------|----------------------|-------------------------| | **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) | @@ -173,6 +207,7 @@ tags: | **Minutes** | ✅ `m` | ❌ No | ❌ No | | **Hours** | ✅ `h` | ❌ No | ❌ No | | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + All timestamps are stored internally as nanoseconds. paths: /write: @@ -182,10 +217,10 @@ paths: x-compatibility-version: v1 description: | Writes line protocol to the specified database. - Use this endpoint to send data in [line protocol](/influxdb3/enterprise/reference/syntax/line-protocol/) format to InfluxDB. + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ parameters: - $ref: '#/components/parameters/dbWriteParam' - $ref: '#/components/parameters/compatibilityPrecisionParam' @@ -249,9 +284,10 @@ paths: description: | Bad request. Some (a _partial write_) or all of the data from the batch was rejected and not written. If a partial write occurred, then some points from the batch are written and queryable. + The response body: - indicates if a partial write occurred or all data was rejected. - - contains details about the [rejected points](/influxdb3/enterprise/write-data/troubleshoot/#troubleshoot-rejected-points), up to 100 points. + - contains details about the [rejected points](/influxdb/version/write-data/troubleshoot/#troubleshoot-rejected-points), up to 100 points. 
content: application/json: examples: @@ -296,7 +332,7 @@ paths: - Write data x-influxdata-guides: - title: Use compatibility APIs to write data - href: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + href: /influxdb/version/write-data/http-api/compatibility-apis/ /api/v2/write: post: operationId: PostV2Write @@ -304,10 +340,10 @@ paths: x-compatibility-version: v2 description: | Writes line protocol to the specified database. - Use this endpoint to send data in [line protocol](/influxdb3/enterprise/reference/syntax/line-protocol/) format to InfluxDB. + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ parameters: - name: Content-Type in: header @@ -387,30 +423,39 @@ paths: - Write data x-influxdata-guides: - title: Use compatibility APIs to write data - href: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + href: /influxdb/version/write-data/http-api/compatibility-apis/ /api/v3/write_lp: post: operationId: PostWriteLP summary: Write line protocol description: | Writes line protocol to the specified database. - This is the native InfluxDB 3 Enterprise write endpoint that provides enhanced control + + This is the native InfluxDB 3 write endpoint that provides enhanced control over write behavior with advanced parameters for high-performance and fault-tolerant operations. - Use this endpoint to send data in [line protocol](/influxdb3/enterprise/reference/syntax/line-protocol/) format to InfluxDB. + + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. Use query parameters to specify options for writing data. 
+ #### Features + - **Partial writes**: Use `accept_partial=true` to allow partial success when some lines in a batch fail - **Asynchronous writes**: Use `no_sync=true` to skip waiting for WAL synchronization, allowing faster response times but sacrificing durability guarantees - **Flexible precision**: Automatic timestamp precision detection with `precision=auto` (default) + #### Auto precision detection + When you use `precision=auto` or omit the precision parameter, InfluxDB 3 automatically detects the timestamp precision based on the magnitude of the timestamp value: + - Timestamps < 5e9 → Second precision (multiplied by 1,000,000,000 to convert to nanoseconds) - Timestamps < 5e12 → Millisecond precision (multiplied by 1,000,000) - Timestamps < 5e15 → Microsecond precision (multiplied by 1,000) - Larger timestamps → Nanosecond precision (no conversion needed) + #### Related - - [Use the InfluxDB v3 write_lp API to write data](/influxdb3/enterprise/write-data/http-api/v3-write-lp/) + + - [Use the InfluxDB v3 write_lp API to write data](/influxdb/version/write-data/http-api/v3-write-lp/) parameters: - $ref: '#/components/parameters/dbWriteParam' - $ref: '#/components/parameters/accept_partial' @@ -679,7 +724,7 @@ paths: Compatible with InfluxDB 1.x client libraries and third-party integrations such as Grafana. externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ parameters: - name: Accept in: header @@ -693,7 +738,9 @@ paths: required: false description: | The content type that the client can understand. + If `text/csv` is specified, the `Content-type` response header is `application/csv` and the response is formatted as CSV. + Returns an error if the format is invalid or non-UTF8. 
- in: query name: chunked @@ -731,8 +778,8 @@ paths: type: string - name: epoch description: | - Formats timestamps as [unix (epoch) timestamps](/influxdb3/enterprise/reference/glossary/#unix-timestamp) with the specified precision - instead of [RFC3339 timestamps](/influxdb3/enterprise/reference/glossary/#rfc3339-timestamp) with nanosecond precision. + Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. in: query schema: $ref: '#/components/schemas/EpochCompatibility' @@ -772,6 +819,7 @@ paths: description: | The content type of the response. Default is `application/json`. + If the `Accept` request header is `application/csv` or `text/csv`, the `Content-type` response header is `application/csv` and the response is formatted as CSV. schema: @@ -801,7 +849,7 @@ paths: - Query data x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - href: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ post: operationId: PostExecuteV1Query summary: Execute InfluxQL query (v1-compatible) @@ -809,7 +857,7 @@ paths: description: Executes an InfluxQL query to retrieve data from the specified database. externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ requestBody: content: application/json: @@ -835,14 +883,16 @@ paths: epoch: description: | A unix timestamp precision. 
+ - `h` for hours - `m` for minutes - `s` for seconds - `ms` for milliseconds - `u` or `µ` for microseconds - `ns` for nanoseconds - Formats timestamps as [unix (epoch) timestamps](/influxdb3/enterprise/reference/glossary/#unix-timestamp) with the specified precision - instead of [RFC3339 timestamps](/influxdb3/enterprise/reference/glossary/#rfc3339-timestamp) with nanosecond precision. + + Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. enum: - ns - u @@ -871,7 +921,9 @@ paths: required: false description: | The content type that the client can understand. + If `text/csv` is specified, the `Content-type` response header is `application/csv` and the response is formatted as CSV. + Returns an error if the format is invalid or non-UTF8. responses: '200': @@ -889,6 +941,7 @@ paths: description: | The content type of the response. Default is `application/json`. + If the `Accept` request header is `application/csv` or `text/csv`, the `Content-type` response header is `application/csv` and the response is formatted as CSV. schema: @@ -918,7 +971,7 @@ paths: - Query data x-influxdata-guides: - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - href: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ /health: get: operationId: GetHealth @@ -977,15 +1030,6 @@ paths: description: Retrieves a list of databases. parameters: - $ref: '#/components/parameters/formatRequired' - - name: show_deleted - in: query - required: false - schema: - type: boolean - default: false - description: | - Include soft-deleted databases in the response. - By default, only active databases are returned. responses: '200': description: Success. The response body contains the list of databases. 
@@ -1041,7 +1085,9 @@ paths: Schedule the database for hard deletion at the specified time. If not provided, the database will be soft deleted. Use ISO 8601 date-time format (for example, "2025-12-31T23:59:59Z"). + #### Deleting a database cannot be undone + Deleting a database is a destructive action. Once a database is deleted, data stored in that database cannot be recovered. responses: @@ -1059,11 +1105,12 @@ paths: summary: Remove database retention period description: | Removes the retention period from a database, setting it to infinite retention. + Data in the database will not expire based on time. parameters: - $ref: '#/components/parameters/db' responses: - '204': - description: Success. The database retention period has been removed. + '200': + description: Success. Retention period removed from database. '401': $ref: '#/components/responses/Unauthorized' '404': @@ -1099,7 +1146,9 @@ paths: Soft deletes a table. The table is scheduled for deletion and unavailable for querying. Use the `hard_delete_at` parameter to schedule a hard deletion. + #### Deleting a table cannot be undone + Deleting a table is a destructive action. Once a table is deleted, data stored in that table cannot be recovered. parameters: @@ -1150,55 +1199,6 @@ paths: description: Table not found. tags: - Table - /api/v3/configure/database/{db}: - patch: - operationId: PatchConfigureDatabase - summary: Update a database - description: | - Updates database configuration, such as retention period. - parameters: - - name: db - in: path - required: true - schema: - type: string - description: The name of the database to update. - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/UpdateDatabaseRequest' - responses: - '200': - description: Success. The database has been updated. - '400': - description: Bad request. - '401': - $ref: '#/components/responses/Unauthorized' - '404': - description: Database not found. 
- tags: - - Database - /api/v3/show/license: - get: - operationId: GetShowLicense - summary: Show license information - description: | - Retrieves information about the current InfluxDB 3 Enterprise license. - responses: - '200': - description: Success. The response body contains license information. - content: - application/json: - schema: - $ref: '#/components/schemas/LicenseResponse' - '401': - $ref: '#/components/responses/Unauthorized' - '403': - description: Access denied. - tags: - - Server information /api/v3/configure/distinct_cache: post: operationId: PostConfigureDistinctCache @@ -1220,6 +1220,7 @@ paths: '400': description: | Bad request. + The server responds with status `400` if the request would overwrite an existing cache with a different configuration. delete: operationId: DeleteConfigureDistinctCache @@ -1309,8 +1310,10 @@ paths: summary: Create processing engine trigger description: | Creates a processing engine trigger with the specified plugin file and trigger specification. + ### Related guides - - [Processing engine and Python plugins](/influxdb3/enterprise/plugins/) + + - [Processing engine and Python plugins](/influxdb/version/plugins/) requestBody: required: true content: @@ -1324,7 +1327,7 @@ paths: In `"cron:CRON_EXPRESSION"`, `CRON_EXPRESSION` uses extended 6-field cron format. The cron expression `0 0 6 * * 1-5` means the trigger will run at 6:00 AM every weekday (Monday to Friday). value: - db: DATABASE_NAME + db: mydb plugin_filename: schedule.py trigger_name: schedule_cron_trigger trigger_specification: cron:0 0 6 * * 1-5 @@ -1456,9 +1459,6 @@ paths: schema: type: boolean default: false - description: | - Force deletion of the trigger even if it has active executions. - By default, deletion fails if the trigger is currently executing. responses: '200': description: Success. The processing engine trigger has been deleted. 
@@ -1524,9 +1524,12 @@ paths: summary: Install plugin packages description: | Installs the specified Python packages into the processing engine plugin environment. + This endpoint is synchronous and blocks until the packages are installed. + ### Related guides - - [Processing engine and Python plugins](/influxdb3/enterprise/plugins/) + + - [Processing engine and Python plugins](/influxdb/version/plugins/) parameters: - $ref: '#/components/parameters/ContentType' requestBody: @@ -1571,9 +1574,12 @@ paths: summary: Install plugin requirements description: | Installs requirements from a requirements file (also known as a "pip requirements file") into the processing engine plugin environment. + This endpoint is synchronous and blocks until the requirements are installed. + ### Related - - [Processing engine and Python plugins](/influxdb3/enterprise/plugins/) + + - [Processing engine and Python plugins](/influxdb/version/plugins/) - [Python requirements file format](https://pip.pypa.io/en/stable/reference/requirements-file-format/) parameters: - $ref: '#/components/parameters/ContentType' @@ -1608,12 +1614,6 @@ paths: operationId: PostTestWALPlugin summary: Test WAL plugin description: Executes a test of a write-ahead logging (WAL) plugin. - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/WALPluginTestRequest' responses: '200': description: Success. The plugin test has been executed. @@ -1630,12 +1630,6 @@ paths: operationId: PostTestSchedulingPlugin summary: Test scheduling plugin description: Executes a test of a scheduling plugin. - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/SchedulePluginTestRequest' responses: '200': description: Success. The plugin test has been executed. @@ -1652,10 +1646,13 @@ paths: - name: request_path description: | The path configured in the request trigger specification for the plugin. 
+ For example, if you define a trigger with the following: + ```json trigger_specification: "request:hello-world" ``` + then, the HTTP API exposes the following plugin endpoint: ``` @@ -1671,10 +1668,13 @@ paths: description: | Executes the On Request processing engine plugin specified in the trigger's `plugin_filename`. The request can include request headers, query string parameters, and a request body, which InfluxDB passes to the plugin. + An On Request plugin implements the following signature: + ```python def process_request(influxdb3_local, query_parameters, request_headers, request_body, args=None) ``` + The response depends on the plugin implementation. responses: '200': @@ -1695,10 +1695,13 @@ paths: description: | Executes the On Request processing engine plugin specified in the trigger's `plugin_filename`. The request can include request headers, query string parameters, and a request body, which InfluxDB passes to the plugin. + An On Request plugin implements the following signature: + ```python def process_request(influxdb3_local, query_parameters, request_headers, request_body, args=None) ``` + The response depends on the plugin implementation. parameters: - $ref: '#/components/parameters/ContentType' @@ -1722,28 +1725,6 @@ paths: description: Processing failure. tags: - Processing engine - /api/v3/configure/enterprise/token: - post: - operationId: PostCreateResourceToken - summary: Create a resource token - description: | - Creates a resource (fine-grained permissions) token. - A resource token is a token that has access to specific resources in the system. - This endpoint is only available in InfluxDB 3 Enterprise. - responses: - '201': - description: | - Success. The resource token has been created. - The response body contains the token string and metadata. 
- content: - application/json: - schema: - $ref: '#/components/schemas/ResourceTokenObject' - '401': - $ref: '#/components/responses/Unauthorized' - tags: - - Authentication - - Auth token /api/v3/configure/token/admin: post: operationId: PostCreateAdminToken @@ -1764,7 +1745,7 @@ paths: $ref: '#/components/responses/Unauthorized' security: - BearerAuthentication: [] - - {} # No auth required for initial token creation + - {} tags: - Auth token /api/v3/configure/token/admin/regenerate: @@ -1798,7 +1779,8 @@ paths: required: true schema: type: string - description: The ID of the token to delete. + description: | + The ID of the token to delete. responses: '204': description: Success. The token has been deleted. @@ -1815,14 +1797,15 @@ paths: summary: Create named admin token description: | Creates a named admin token. - A named admin token is a special type of admin token with a custom name for identification and management. + A named admin token is an admin token with a specific name identifier. parameters: - name: name in: query required: true schema: type: string - description: The name for the admin token. + description: | + The name for the admin token. responses: '201': description: | @@ -1846,12 +1829,6 @@ paths: description: | Updates a plugin file in the plugin directory. x-security-note: Requires an admin token - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/PluginFileRequest' responses: '204': description: Success. The plugin file has been updated. @@ -1868,21 +1845,86 @@ paths: description: | Updates the plugin directory configuration. x-security-note: Requires an admin token + responses: + '204': + description: Success. The plugin directory has been updated. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Forbidden. Admin token required. 
+ tags: + - Processing engine + /api/v3/configure/database/{db}: + patch: + operationId: PatchConfigureDatabase + summary: Update a database + description: | + Updates database configuration, such as retention period. + parameters: + - name: db + in: path + required: true + schema: + type: string + description: The name of the database to update. requestBody: required: true content: application/json: schema: - $ref: '#/components/schemas/PluginDirectoryRequest' + $ref: '#/components/schemas/UpdateDatabaseRequest' responses: - '204': - description: Success. The plugin directory has been updated. + '200': + description: Success. The database has been updated. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Database + /api/v3/show/license: + get: + operationId: GetShowLicense + summary: Show license information + description: | + Retrieves information about the current InfluxDB 3 Enterprise license. + responses: + '200': + description: Success. The response body contains license information. + content: + application/json: + schema: + $ref: '#/components/schemas/LicenseResponse' '401': $ref: '#/components/responses/Unauthorized' '403': - description: Forbidden. Admin token required. + description: Access denied. tags: - - Processing engine + - Server information + /api/v3/configure/enterprise/token: + post: + operationId: PostCreateResourceToken + summary: Create a resource token + description: | + Creates a resource (fine-grained permissions) token. + A resource token is a token that has access to specific resources in the system. + This endpoint is only available in InfluxDB 3 Enterprise. + responses: + '201': + description: | + Success. The resource token has been created. + The response body contains the token string and metadata. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ResourceTokenObject' + '401': + $ref: '#/components/responses/Unauthorized' + tags: + - Authentication + - Auth token components: parameters: AcceptQueryHeader: @@ -1951,6 +1993,7 @@ components: type: string description: | The name of the database. + If you provide a query that specifies the database, you can omit the 'db' parameter from your request. accept_partial: name: accept_partial @@ -2010,7 +2053,7 @@ components: type: string description: | Password for v1 compatibility authentication. - For query string authentication, pass a database token with write permissions as this parameter. + For query string authentication, pass an admin token. InfluxDB 3 checks that the `p` value is an authorized token. requestBodies: lineProtocolRequestBody: @@ -2059,44 +2102,6 @@ components: hash: 00xx0Xx0xx00XX0x0 created_at: '2025-04-18T14:02:45.331Z' expiry: null - ResourceTokenObject: - type: object - properties: - token_name: - type: string - permissions: - type: array - items: - type: object - properties: - resource_type: - type: string - enum: - - system - - db - resource_identifier: - type: array - items: - type: string - actions: - type: array - items: - type: string - enum: - - read - - write - expiry_secs: - type: integer - description: The expiration time in seconds. - example: - token_name: All system information - permissions: - - resource_type: system - resource_identifier: - - '*' - actions: - - read - expiry_secs: 300000 ContentEncoding: type: string enum: @@ -2105,7 +2110,9 @@ components: description: | Content coding. Use `gzip` for compressed data or `identity` for unmodified, uncompressed data. + #### Multi-member gzip support + InfluxDB 3 supports multi-member gzip payloads (concatenated gzip files per [RFC 1952](https://www.rfc-editor.org/rfc/rfc1952)). 
This allows you to: - Concatenate multiple gzip files and send them in a single request @@ -2143,9 +2150,11 @@ components: default: false description: | Acknowledges a successful write without waiting for WAL persistence. + #### Related - - [Use the HTTP API and client libraries to write data](/influxdb3/enterprise/write-data/api-client-libraries/) - - [Data durability](/influxdb3/enterprise/reference/internals/durability/) + + - [Use the HTTP API and client libraries to write data](/influxdb/version/write-data/api-client-libraries/) + - [Data durability](/influxdb/version/reference/internals/durability/) PrecisionWriteCompatibility: enum: - ms @@ -2166,6 +2175,7 @@ components: type: string description: | The precision for unix timestamps in the line protocol batch. + Supported values: - `auto` (default): Automatically detects precision based on timestamp magnitude - `nanosecond`: Nanoseconds @@ -2211,13 +2221,9 @@ components: properties: db: type: string - pattern: ^[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9]$|^[a-zA-Z0-9]$ - description: |- - The database name. Database names cannot contain underscores (_). - Names must start and end with alphanumeric characters and can contain hyphens (-) in the middle. retention_period: type: string - description: |- + description: | The retention period for the database. Specifies how long data should be retained. Use duration format (for example, "1d", "1h", "30m", "7d"). example: 7d @@ -2252,12 +2258,6 @@ components: required: - name - type - retention_period: - type: string - description: |- - The retention period for the table. Specifies how long data in this table should be retained. - Use duration format (for example, "1d", "1h", "30m", "7d"). - example: 30d required: - db - table @@ -2269,8 +2269,6 @@ components: type: string table: type: string - node_spec: - $ref: '#/components/schemas/ApiNodeSpec' name: type: string description: Optional cache name. 
@@ -2303,8 +2301,6 @@ components: type: string table: type: string - node_spec: - $ref: '#/components/schemas/ApiNodeSpec' name: type: string description: Optional cache name. @@ -2347,9 +2343,8 @@ components: The path and filename of the plugin to execute--for example, `schedule.py` or `endpoints/report.py`. The path can be absolute or relative to the `--plugins-dir` directory configured when starting InfluxDB 3. + The plugin file must implement the trigger interface associated with the trigger's specification. - node_spec: - $ref: '#/components/schemas/ApiNodeSpec' trigger_name: type: string trigger_settings: @@ -2358,11 +2353,15 @@ components: allOf: - $ref: '#/components/schemas/TriggerSettings' trigger_specification: + type: string description: | Specifies when and how the processing engine trigger should be invoked. + ## Supported trigger specifications: + ### Cron-based scheduling Format: `cron:CRON_EXPRESSION` + Uses extended (6-field) cron format (second minute hour day_of_month month day_of_week): ``` ┌───────────── second (0-59) @@ -2378,8 +2377,10 @@ components: - `cron:0 0 6 * * 1-5` - Every weekday at 6:00 AM - `cron:0 30 14 * * 5` - Every Friday at 2:30 PM - `cron:0 0 0 1 * *` - First day of every month at midnight + ### Interval-based scheduling Format: `every:DURATION` + Supported durations: `s` (seconds), `m` (minutes), `h` (hours), `d` (days), `w` (weeks), `M` (months), `y` (years): - `every:30s` - Every 30 seconds - `every:5m` - Every 5 minutes @@ -2388,12 +2389,16 @@ components: - `every:1w` - Every week - `every:1M` - Every month - `every:1y` - Every year + **Maximum interval**: 1 year + ### Table-based triggers - `all_tables` - Triggers on write events to any table in the database - `table:TABLE_NAME` - Triggers on write events to a specific table + ### On-demand triggers Format: `request:REQUEST_PATH` + Creates an HTTP endpoint `/api/v3/engine/REQUEST_PATH` for manual invocation: - `request:hello-world` - Creates endpoint 
`/api/v3/engine/hello-world` - `request:data-export` - Creates endpoint `/api/v3/engine/data-export` @@ -2440,133 +2445,6 @@ components: required: - run_async - error_behavior - ApiNodeSpec: - type: object - description: | - Optional specification for targeting specific nodes in a multi-node InfluxDB 3 Enterprise cluster. - Use this to control which node(s) should handle the cache or trigger. - properties: - node_id: - type: string - description: | - The ID of a specific node in the cluster. - If specified, the cache or trigger will only be created on this node. - node_group: - type: string - description: | - The name of a node group in the cluster. - If specified, the cache or trigger will be created on all nodes in this group. - WALPluginTestRequest: - type: object - description: | - Request body for testing a write-ahead logging (WAL) plugin. - properties: - filename: - type: string - description: | - The path and filename of the plugin to test. - database: - type: string - description: | - The database name to use for the test. - input_lp: - type: string - description: | - Line protocol data to use as input for the test. - cache_name: - type: string - description: | - Optional name of the cache to use in the test. - input_arguments: - type: object - additionalProperties: - type: string - description: | - Optional key-value pairs of arguments to pass to the plugin. - required: - - filename - - database - - input_lp - SchedulePluginTestRequest: - type: object - description: | - Request body for testing a scheduling plugin. - properties: - filename: - type: string - description: | - The path and filename of the plugin to test. - database: - type: string - description: | - The database name to use for the test. - schedule: - type: string - description: | - Optional schedule specification in cron or interval format. - cache_name: - type: string - description: | - Optional name of the cache to use in the test. 
- input_arguments: - type: object - additionalProperties: - type: string - description: | - Optional key-value pairs of arguments to pass to the plugin. - required: - - filename - - database - PluginFileRequest: - type: object - description: | - Request body for updating a plugin file. - properties: - plugin_name: - type: string - description: | - The name of the plugin file to update. - content: - type: string - description: | - The content of the plugin file. - required: - - plugin_name - - content - PluginDirectoryRequest: - type: object - description: | - Request body for updating plugin directory with multiple files. - properties: - plugin_name: - type: string - description: | - The name of the plugin directory to update. - files: - type: array - items: - $ref: '#/components/schemas/PluginFileEntry' - description: | - List of plugin files to include in the directory. - required: - - plugin_name - - files - PluginFileEntry: - type: object - description: | - Represents a single file in a plugin directory. - properties: - filename: - type: string - description: | - The name of the file within the plugin directory. - content: - type: string - description: | - The content of the file. - required: - - filename - - content ShowDatabasesResponse: type: object properties: @@ -2708,6 +2586,36 @@ components: description: The current status of the license. example: active description: Response schema for license information. + ResourceTokenObject: + type: object + properties: + token_name: + type: string + permissions: + type: array + items: + type: object + properties: + resource_type: + type: string + enum: + - system + - db + resource_identifier: + type: array + items: + type: string + actions: + type: array + items: + type: string + enum: + - read + - write + expiry_secs: + type: integer + description: The expiration time in seconds. + description: Response schema for resource token creation. responses: Unauthorized: description: Unauthorized access. 
@@ -2718,6 +2626,7 @@ components: BadRequest: description: | Request failed. Possible reasons: + - Invalid database name - Malformed request body - Invalid timestamp precision @@ -2755,83 +2664,109 @@ components: scheme: basic description: | Use the `Authorization` header with the `Basic` scheme to authenticate v1 API requests. + Works with v1 compatibility [`/write`](#operation/PostV1Write) and [`/query`](#operation/GetV1Query) endpoints in InfluxDB 3. + When authenticating requests, InfluxDB 3 checks that the `password` part of the decoded credential is an authorized token and ignores the `username` part of the decoded credential. - ### Syntax - ```http - Authorization: Basic - ``` + ### Example + ```bash curl "http://localhost:8181/write?db=DATABASE_NAME&precision=s" \ --user "":"AUTH_TOKEN" \ --header "Content-type: text/plain; charset=utf-8" \ --data-binary 'home,room=kitchen temp=72 1641024000' ``` + Replace the following: - - **`DATABASE_NAME`**: your InfluxDB 3 Enterprise database - - **`AUTH_TOKEN`**: an admin token or database token authorized for the database + + - **`DATABASE_NAME`**: your InfluxDB 3 database + - **`AUTH_TOKEN`**: an admin token + #### Related guides - - [Authenticate v1 API requests](/influxdb3/enterprise/guides/api-compatibility/v1/) - - [Manage tokens](/influxdb3/enterprise/admin/tokens/) + + - [Authenticate v1 API requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) QuerystringAuthentication: type: apiKey in: query name: u=&p= description: | Use InfluxDB 1.x API parameters to provide credentials through the query string for v1 API requests. + Querystring authentication works with v1-compatible [`/write`](#operation/PostV1Write) and [`/query`](#operation/GetV1Query) endpoints. + When authenticating requests, InfluxDB 3 checks that the `p` (_password_) query parameter is an authorized token and ignores the `u` (_username_) query parameter. 
+ ### Syntax + ```http - https://localhost:8181/query/?[u=any]&p=AUTH_TOKEN - https://localhost:8181/write/?[u=any]&p=AUTH_TOKEN + http://localhost:8181/query/?[u=any]&p=AUTH_TOKEN + http://localhost:8181/write/?[u=any]&p=AUTH_TOKEN ``` + ### Examples + ```bash curl "http://localhost:8181/write?db=DATABASE_NAME&precision=s&p=AUTH_TOKEN" \ --header "Content-type: text/plain; charset=utf-8" \ --data-binary 'home,room=kitchen temp=72 1641024000' ``` + Replace the following: - - **`DATABASE_NAME`**: your InfluxDB 3 Enterprise database - - **`AUTH_TOKEN`**: an admin token or database token authorized for the database + + - **`DATABASE_NAME`**: your InfluxDB 3 database + - **`AUTH_TOKEN`**: an admin token + ```bash ####################################### # Use an InfluxDB 1.x compatible username and password # to query the InfluxDB v1 HTTP API ####################################### # Use authentication query parameters: - # ?p=AUTH_TOKEN + # ?p=AUTH_TOKEN ####################################### + curl --get "http://localhost:8181/query" \ --data-urlencode "p=AUTH_TOKEN" \ --data-urlencode "db=DATABASE_NAME" \ --data-urlencode "q=SELECT * FROM MEASUREMENT" ``` + Replace the following: + - **`DATABASE_NAME`**: the database to query - - **`AUTH_TOKEN`**: a database token with sufficient permissions to the database + - **`AUTH_TOKEN`**: an [admin token](/influxdb/version/admin/tokens/) + #### Related guides - - [Authenticate v1 API requests](/influxdb3/enterprise/guides/api-compatibility/v1/) - - [Manage tokens](/influxdb3/enterprise/admin/tokens/) + + - [Authenticate v1 API requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) BearerAuthentication: type: http scheme: bearer bearerFormat: JWT description: | + Use the OAuth Bearer authentication scheme to provide an authorization token to InfluxDB 3. + Bearer authentication works with all endpoints. + In your API requests, send an `Authorization` header.
- For the header value, provide the word `Bearer` followed by a space and a database token. + For the header value, provide the word `Bearer` followed by a space and an admin token. + + ### Syntax + + ```http Authorization: Bearer AUTH_TOKEN ``` + ### Example + ```bash curl http://localhost:8181/api/v3/query_influxql \ --header "Authorization: Bearer AUTH_TOKEN" @@ -2839,26 +2774,51 @@ components: TokenAuthentication: description: | Use InfluxDB v2 Token authentication to provide an authorization token to InfluxDB 3. + The v2 Token scheme works with v1 and v2 compatibility endpoints in InfluxDB 3. + In your API requests, send an `Authorization` header. For the header value, provide the word `Token` followed by a space and a database token. The word `Token` is case-sensitive. + ### Syntax + ```http Authorization: Token AUTH_TOKEN ``` + ### Example + ```sh ######################################################## # Use the Token authentication scheme with /api/v2/write # to write data. ######################################################## + curl --request POST "http://localhost:8181/api/v2/write?bucket=DATABASE_NAME&precision=s" \ --header "Authorization: Token AUTH_TOKEN" \ --data-binary 'home,room=kitchen temp=72 1463683075' ``` + ### Related guides - - [Manage tokens](/influxdb3/enterprise/admin/tokens/) + + - [Manage tokens](/influxdb/version/admin/tokens/) in: header name: Authorization type: apiKey +x-tagGroups: + - name: Using the InfluxDB HTTP API + tags: + - Quick start + - Authentication + - Cache data + - Common parameters + - Response codes + - Compatibility endpoints + - Database + - Processing engine + - Server information + - Table + - Token + - Query data + - Write data diff --git a/api-docs/influxdb3/shared/v3/base.yml b/api-docs/influxdb3/shared/v3/base.yml new file mode 100644 index 0000000000..110da8eb2c --- /dev/null +++ b/api-docs/influxdb3/shared/v3/base.yml @@ -0,0 +1,2715 @@ +openapi: 3.0.3 +info: + title: InfluxDB 3 API Service +
description: | + The InfluxDB HTTP API for InfluxDB 3 provides a programmatic interface for + interacting with InfluxDB 3 databases and resources. + Use this API to: + + - Write data to InfluxDB 3 databases + - Query data using SQL or InfluxQL + - Process data using Processing engine plugins + - Manage databases, tables, and Processing engine triggers + - Perform administrative tasks and access system information + + The API includes endpoints under the following paths: + - `/api/v3`: InfluxDB 3 native endpoints + - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients + - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients + + + version: '3.7.0' + license: + name: MIT + url: https://opensource.org/licenses/MIT + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com +servers: + - url: https://{baseurl} + description: InfluxDB 3 API URL + variables: + baseurl: + enum: + - localhost:8181 + default: localhost:8181 + description: InfluxDB 3 URL +security: + - BearerAuthentication: [] +tags: + - name: Authentication + description: | + Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: + + | Authentication scheme | Works with | + |:----------------------|:-----------| + | Bearer authentication | All endpoints | + | Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) | + | Basic authentication | v1 compatibility endpoints (`/write`, `/query`) | + | Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) | + + See the **Security Schemes** section below for details on each authentication method. + x-traitTag: true + - name: Cache data + description: | + Manage the in-memory cache. 
+ + #### Distinct Value Cache + + The Distinct Value Cache (DVC) lets you cache distinct + values of one or more columns in a table, improving the performance of + queries that return distinct tag and field values. + + The DVC is an in-memory cache that stores distinct values for specific columns + in a table. When you create a DVC, you can specify what columns' distinct + values to cache, the maximum number of distinct value combinations to cache, and + the maximum age of cached values. A DVC is associated with a table, which can + have multiple DVCs. + + #### Last Value Cache + + The Last Value Cache (LVC) lets you cache the most recent + values for specific fields in a table, improving the performance of queries that + return the most recent value of a field for specific series or the last N values + of a field. + + The LVC is an in-memory cache that stores the last N number of values for + specific fields of series in a table. When you create an LVC, you can specify + what fields to cache, what tags to use to identify each series, and the + number of values to cache for each unique series. + An LVC is associated with a table, which can have multiple LVCs. + + #### Related guides + + - [Manage the Distinct Value Cache](/influxdb/version/admin/distinct-value-cache/) + - [Manage the Last Value Cache](/influxdb/version/admin/last-value-cache/) + - name: Migrate from InfluxDB v1 or v2 + x-traitTag: true + description: | + Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. + + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. + Operations marked with v1 or v2 badges are compatible with the respective InfluxDB version.
+ + ### Migration guides + + - [Migrate from InfluxDB v1](/influxdb/version/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x + - [Migrate from InfluxDB v2](/influxdb/version/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud + - [Use compatibility APIs to write data](/influxdb/version/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints + - [Use the v1 HTTP query API](/influxdb/version/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP + - name: Database + description: Manage databases + - description: | + Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use. + + ### Common parameters + + The following table shows common parameters used by many InfluxDB API endpoints. + Many endpoints may require other parameters in the query string or in the + request body that perform functions specific to those endpoints. + + | Query parameter | Value type | Description | + |:------------------------ |:--------------------- |:-------------------------------------------| + | `db` | string | The database name | + + InfluxDB HTTP API endpoints use standard HTTP request and response headers. + The following table shows common headers used by many InfluxDB API endpoints. + Some endpoints may use other headers that perform functions more specific to those endpoints--for example, + the write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body. + + | Header | Value type | Description | + |:------------------------ |:--------------------- |:-------------------------------------------| + | `Accept` | string | The content type that the client can understand. | + | `Authorization` | string | The [authorization scheme and credential](/influxdb/version/api/authentication/). | + | `Content-Length` | integer | The size of the entity-body, in bytes. 
| + | `Content-Type` | string | The format of the data in the request body. | + name: Headers and parameters + x-traitTag: true + - name: Processing engine + description: | + Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins. + + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database. + Use Processing engine plugins and triggers to run code and perform tasks for different database events. + + To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb/version/processing-engine/) guide. + - name: Query data + description: Query data using SQL or InfluxQL + - name: Quick start + description: | + 1. [Create an admin token](#section/Authentication) to authorize API requests. + + ```bash + curl -X POST "http://localhost:8181/api/v3/configure/token/admin" + ``` + 2. [Check the status](#section/Server-information) of the InfluxDB server. + + ```bash + curl "http://localhost:8181/health" \ + --header "Authorization: Bearer ADMIN_TOKEN" + ``` + + 3. [Write data](#operation/PostWriteLP) to InfluxDB. + + ```bash + curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" \ + --header "Authorization: Bearer ADMIN_TOKEN" \ + --data-raw "home,room=Kitchen temp=72.0 + home,room=Living\ room temp=71.5" + ``` + + If all data is written, the response is `204 No Content`. + + 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.
+ + ```bash + curl -G "http://localhost:8181/api/v3/query_sql" \ + --header "Authorization: Bearer ADMIN_TOKEN" \ + --data-urlencode "db=sensors" \ + --data-urlencode "q=SELECT * FROM home WHERE room='Living room'" \ + --data-urlencode "format=jsonl" + ``` + + Output: + + ```jsonl + {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} + ``` + + For more information about using InfluxDB 3, see the [Get started](/influxdb/version/get-started/) guide. + x-traitTag: true + - name: Server information + description: Retrieve server metrics, status, and version information + - name: Table + description: Manage table schemas and data + - name: Auth token + description: Manage tokens for authentication and authorization + - name: Write data + description: | + Write data to InfluxDB 3 using line protocol format. + + #### Timestamp precision across write APIs + + InfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions. + The following table compares timestamp precision support across v1, v2, and v3 write APIs: + + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) | + |-----------|---------------|----------------------|-------------------------| + | **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) | + | **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` | + | **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` | + | **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` | + | **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` | + | **Minutes** | ✅ `m` | ❌ No | ❌ No | + | **Hours** | ✅ `h` | ❌ No | ❌ No | + | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + + All timestamps are stored internally as nanoseconds. +paths: + /write: + post: + operationId: PostV1Write + summary: Write line protocol (v1-compatible) + x-compatibility-version: v1 + description: | + Writes line protocol to the specified database. 
+ Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. + externalDocs: + description: Use compatibility APIs to write data + url: /influxdb/version/write-data/http-api/compatibility-apis/ + parameters: + - $ref: '#/components/parameters/dbWriteParam' + - $ref: '#/components/parameters/compatibilityPrecisionParam' + - $ref: '#/components/parameters/v1UsernameParam' + - $ref: '#/components/parameters/v1PasswordParam' + - name: rp + in: query + required: false + schema: + type: string + description: | + Retention policy name. Honored but discouraged. InfluxDB 3 doesn't use retention policies. + - name: consistency + in: query + required: false + schema: + type: string + description: | + Write consistency level. Ignored by InfluxDB 3. Provided for compatibility with InfluxDB 1.x clients. + - name: Authorization + in: header + required: false + schema: + type: string + description: | + Authorization header for token-based authentication. + Supported schemes: + - `Bearer AUTH_TOKEN` - OAuth bearer token scheme + - `Token AUTH_TOKEN` - InfluxDB v2 token scheme + - `Basic ` - Basic authentication (username is ignored) + - name: Content-Type + in: header + description: | + The content type of the request payload. + schema: + $ref: '#/components/schemas/LineProtocol' + required: false + - name: Accept + in: header + description: | + The content type that the client can understand. + Writes only return a response body if they fail (partially or completely)--for example, + due to a syntax problem or type mismatch. + schema: + type: string + default: application/json + enum: + - application/json + required: false + - $ref: '#/components/parameters/ContentEncoding' + - $ref: '#/components/parameters/ContentLength' + requestBody: + $ref: '#/components/requestBodies/lineProtocolRequestBody' + responses: + '204': + description: Success ("No Content"). All data in the batch is written and queryable. 
+ headers: + cluster-uuid: + $ref: '#/components/headers/ClusterUUID' + '400': + description: | + Bad request. Some (a _partial write_) or all of the data from the batch was rejected and not written. + If a partial write occurred, then some points from the batch are written and queryable. + + The response body: + - indicates if a partial write occurred or all data was rejected. + - contains details about the [rejected points](/influxdb/version/write-data/troubleshoot/#troubleshoot-rejected-points), up to 100 points. + content: + application/json: + examples: + rejectedAllPoints: + summary: Rejected all points in the batch + value: | + { + "error": "write of line protocol failed", + "data": [ + { + "original_line": "dquote> home,room=Kitchen temp=hi", + "line_number": 2, + "error_message": "No fields were provided" + } + ] + } + partialWriteErrorWithRejectedPoints: + summary: Partial write rejected some points in the batch + value: | + { + "error": "partial write of line protocol occurred", + "data": [ + { + "original_line": "dquote> home,room=Kitchen temp=hi", + "line_number": 2, + "error_message": "No fields were provided" + } + ] + } + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '413': + description: Request entity too large. + security: + - BearerAuthentication: [] + - TokenAuthentication: [] + - BasicAuthentication: [] + - QuerystringAuthentication: [] + tags: + - Write data + x-influxdata-guides: + - title: Use compatibility APIs to write data + href: /influxdb/version/write-data/http-api/compatibility-apis/ + /api/v2/write: + post: + operationId: PostV2Write + summary: Write line protocol (v2-compatible) + x-compatibility-version: v2 + description: | + Writes line protocol to the specified database. + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. 
+ externalDocs: + description: Use compatibility APIs to write data + url: /influxdb/version/write-data/http-api/compatibility-apis/ + parameters: + - name: Content-Type + in: header + description: | + The content type of the request payload. + schema: + $ref: '#/components/schemas/LineProtocol' + required: false + - description: | + The compression applied to the line protocol in the request payload. + To send a gzip payload, pass `Content-Encoding: gzip` header. + in: header + name: Content-Encoding + schema: + default: identity + description: | + Content coding. + Use `gzip` for compressed data or `identity` for unmodified, uncompressed data. + enum: + - gzip + - identity + type: string + - description: | + The size of the entity-body, in bytes, sent to InfluxDB. + in: header + name: Content-Length + schema: + description: The length in decimal number of octets. + type: integer + - description: | + The content type that the client can understand. + Writes only return a response body if they fail (partially or completely)--for example, + due to a syntax problem or type mismatch. + in: header + name: Accept + schema: + default: application/json + description: Error content type. + enum: + - application/json + type: string + - name: db + in: query + required: true + schema: + type: string + description: | + A database name. + InfluxDB creates the database if it doesn't already exist, and then + writes all points in the batch to the database. + - name: accept_partial + in: query + required: false + schema: + $ref: '#/components/schemas/AcceptPartial' + - $ref: '#/components/parameters/compatibilityPrecisionParam' + requestBody: + $ref: '#/components/requestBodies/lineProtocolRequestBody' + responses: + '204': + description: Success ("No Content"). All data in the batch is written and queryable. + headers: + cluster-uuid: + $ref: '#/components/headers/ClusterUUID' + '400': + description: Bad request. 
+ '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '413': + description: Request entity too large. + security: + - BearerAuthentication: [] + - TokenAuthentication: [] + tags: + - Write data + x-influxdata-guides: + - title: Use compatibility APIs to write data + href: /influxdb/version/write-data/http-api/compatibility-apis/ + /api/v3/write_lp: + post: + operationId: PostWriteLP + summary: Write line protocol + description: | + Writes line protocol to the specified database. + + This is the native InfluxDB 3 write endpoint that provides enhanced control + over write behavior with advanced parameters for high-performance and fault-tolerant operations. + + Use this endpoint to send data in [line protocol](/influxdb/version/reference/syntax/line-protocol/) format to InfluxDB. + Use query parameters to specify options for writing data. + + #### Features + + - **Partial writes**: Use `accept_partial=true` to allow partial success when some lines in a batch fail + - **Asynchronous writes**: Use `no_sync=true` to skip waiting for WAL synchronization, allowing faster response times but sacrificing durability guarantees + - **Flexible precision**: Automatic timestamp precision detection with `precision=auto` (default) + + #### Auto precision detection + + When you use `precision=auto` or omit the precision parameter, InfluxDB 3 automatically detects + the timestamp precision based on the magnitude of the timestamp value: + + - Timestamps < 5e9 → Second precision (multiplied by 1,000,000,000 to convert to nanoseconds) + - Timestamps < 5e12 → Millisecond precision (multiplied by 1,000,000) + - Timestamps < 5e15 → Microsecond precision (multiplied by 1,000) + - Larger timestamps → Nanosecond precision (no conversion needed) + + #### Related + + - [Use the InfluxDB v3 write_lp API to write data](/influxdb/version/write-data/http-api/v3-write-lp/) + parameters: + - $ref: '#/components/parameters/dbWriteParam' + - $ref: 
'#/components/parameters/accept_partial' + - $ref: '#/components/parameters/precisionParam' + - name: no_sync + in: query + schema: + $ref: '#/components/schemas/NoSync' + - name: Content-Type + in: header + description: | + The content type of the request payload. + schema: + $ref: '#/components/schemas/LineProtocol' + required: false + - name: Accept + in: header + description: | + The content type that the client can understand. + Writes only return a response body if they fail (partially or completely)--for example, + due to a syntax problem or type mismatch. + schema: + type: string + default: application/json + enum: + - application/json + required: false + - $ref: '#/components/parameters/ContentEncoding' + - $ref: '#/components/parameters/ContentLength' + requestBody: + $ref: '#/components/requestBodies/lineProtocolRequestBody' + responses: + '204': + description: Success ("No Content"). All data in the batch is written and queryable. + headers: + cluster-uuid: + $ref: '#/components/headers/ClusterUUID' + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '413': + description: Request entity too large. + '422': + description: Unprocessable entity. 
+ x-codeSamples: + - label: cURL - Basic write + lang: Shell + source: | + curl --request POST "http://localhost:8181/api/v3/write_lp?db=sensors" \ + --header "Authorization: Bearer DATABASE_TOKEN" \ + --header "Content-Type: text/plain" \ + --data-raw "cpu,host=server01 usage=85.2 1638360000000000000" + - label: cURL - Write with millisecond precision + lang: Shell + source: | + curl --request POST "http://localhost:8181/api/v3/write_lp?db=sensors&precision=ms" \ + --header "Authorization: Bearer DATABASE_TOKEN" \ + --header "Content-Type: text/plain" \ + --data-raw "cpu,host=server01 usage=85.2 1638360000000" + - label: cURL - Asynchronous write with partial acceptance + lang: Shell + source: | + curl --request POST "http://localhost:8181/api/v3/write_lp?db=sensors&accept_partial=true&no_sync=true&precision=auto" \ + --header "Authorization: Bearer DATABASE_TOKEN" \ + --header "Content-Type: text/plain" \ + --data-raw "cpu,host=server01 usage=85.2 + memory,host=server01 used=4096" + - label: cURL - Multiple measurements with tags + lang: Shell + source: | + curl --request POST "http://localhost:8181/api/v3/write_lp?db=sensors&precision=ns" \ + --header "Authorization: Bearer DATABASE_TOKEN" \ + --header "Content-Type: text/plain" \ + --data-raw "cpu,host=server01,region=us-west usage=85.2,load=0.75 1638360000000000000 + memory,host=server01,region=us-west used=4096,free=12288 1638360000000000000 + disk,host=server01,region=us-west,device=/dev/sda1 used=50.5,free=49.5 1638360000000000000" + tags: + - Write data + /api/v3/query_sql: + get: + operationId: GetExecuteQuerySQL + summary: Execute SQL query + description: Executes an SQL query to retrieve data from the specified database. 
+ parameters: + - $ref: '#/components/parameters/db' + - $ref: '#/components/parameters/querySqlParam' + - $ref: '#/components/parameters/format' + - $ref: '#/components/parameters/AcceptQueryHeader' + - $ref: '#/components/parameters/ContentType' + responses: + '200': + description: Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + example: + results: + - series: + - name: mytable + columns: + - time + - value + values: + - - '2024-02-02T12:00:00Z' + - 42 + text/csv: + schema: + type: string + application/vnd.apache.parquet: + schema: + type: string + application/jsonl: + schema: + type: string + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. + '422': + description: Unprocessable entity. + tags: + - Query data + post: + operationId: PostExecuteQuerySQL + summary: Execute SQL query + description: Executes an SQL query to retrieve data from the specified database. + parameters: + - $ref: '#/components/parameters/AcceptQueryHeader' + - $ref: '#/components/parameters/ContentType' + requestBody: + $ref: '#/components/requestBodies/queryRequestBody' + responses: + '200': + description: Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + text/csv: + schema: + type: string + application/vnd.apache.parquet: + schema: + type: string + application/jsonl: + schema: + type: string + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. + '422': + description: Unprocessable entity. 
+ tags: + - Query data + /api/v3/query_influxql: + get: + operationId: GetExecuteInfluxQLQuery + summary: Execute InfluxQL query + description: Executes an InfluxQL query to retrieve data from the specified database. + parameters: + - $ref: '#/components/parameters/dbQueryParam' + - name: q + in: query + required: true + schema: + type: string + - name: format + in: query + required: false + schema: + type: string + - $ref: '#/components/parameters/AcceptQueryHeader' + responses: + '200': + description: Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + text/csv: + schema: + type: string + application/vnd.apache.parquet: + schema: + type: string + application/jsonl: + schema: + type: string + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. + '422': + description: Unprocessable entity. + tags: + - Query data + post: + operationId: PostExecuteQueryInfluxQL + summary: Execute InfluxQL query + description: Executes an InfluxQL query to retrieve data from the specified database. + parameters: + - $ref: '#/components/parameters/AcceptQueryHeader' + - $ref: '#/components/parameters/ContentType' + requestBody: + $ref: '#/components/requestBodies/queryRequestBody' + responses: + '200': + description: Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + text/csv: + schema: + type: string + application/vnd.apache.parquet: + schema: + type: string + application/jsonl: + schema: + type: string + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. 
+ '422': + description: Unprocessable entity. + tags: + - Query data + /query: + get: + operationId: GetV1ExecuteQuery + summary: Execute InfluxQL query (v1-compatible) + x-compatibility-version: v1 + description: | + Executes an InfluxQL query to retrieve data from the specified database. + Compatible with InfluxDB 1.x client libraries and third-party integrations such as Grafana. + externalDocs: + description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ + parameters: + - name: Accept + in: header + schema: + type: string + default: application/json + enum: + - application/json + - application/csv + - text/csv + required: false + description: | + The content type that the client can understand. + + If `text/csv` is specified, the `Content-type` response header is `application/csv` and the response is formatted as CSV. + + Returns an error if the format is invalid or non-UTF8. + - in: query + name: chunked + description: | + If true, the response is divided into chunks of size `chunk_size`. + schema: + type: boolean + default: false + - in: query + name: chunk_size + description: | + The number of records that will go into a chunk. + This parameter is only used if `chunked=true`. + schema: + type: integer + default: 10000 + - in: query + name: db + description: The database to query. If not provided, the InfluxQL query string must specify the database. + schema: + type: string + format: InfluxQL + - in: query + name: pretty + description: | + If true, the JSON response is formatted in a human-readable format. + schema: + type: boolean + default: false + - in: query + name: q + description: The InfluxQL query string. 
+ required: true + schema: + type: string + - name: epoch + description: | + Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. + in: query + schema: + $ref: '#/components/schemas/EpochCompatibility' + - $ref: '#/components/parameters/v1UsernameParam' + - $ref: '#/components/parameters/v1PasswordParam' + - name: rp + in: query + required: false + schema: + type: string + description: | + Retention policy name. Honored but discouraged. InfluxDB 3 doesn't use retention policies. + - name: Authorization + in: header + required: false + schema: + type: string + description: | + Authorization header for token-based authentication. + Supported schemes: + - `Bearer AUTH_TOKEN` - OAuth bearer token scheme + - `Token AUTH_TOKEN` - InfluxDB v2 token scheme + - `Basic ` - Basic authentication (username is ignored) + responses: + '200': + description: | + Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + application/csv: + schema: + type: string + headers: + Content-Type: + description: | + The content type of the response. + Default is `application/json`. + + If the `Accept` request header is `application/csv` or `text/csv`, the `Content-type` response header is `application/csv` + and the response is formatted as CSV. + schema: + type: string + default: application/json + enum: + - application/json + - application/csv + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. + '422': + description: Unprocessable entity. 
+ security: + - BearerAuthentication: [] + - TokenAuthentication: [] + - BasicAuthentication: [] + - QuerystringAuthentication: [] + tags: + - Query data + x-influxdata-guides: + - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ + post: + operationId: PostExecuteV1Query + summary: Execute InfluxQL query (v1-compatible) + x-compatibility-version: v1 + description: Executes an InfluxQL query to retrieve data from the specified database. + externalDocs: + description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ + requestBody: + content: + application/json: + schema: + type: object + properties: + db: + type: string + description: The database to query. If not provided, the InfluxQL query string must specify the database. + q: + description: The InfluxQL query string. + type: string + chunked: + description: | + If true, the response is divided into chunks of size `chunk_size`. + type: boolean + chunk_size: + description: | + The number of records that will go into a chunk. + This parameter is only used if `chunked=true`. + type: integer + default: 10000 + epoch: + description: | + A unix timestamp precision. + + - `h` for hours + - `m` for minutes + - `s` for seconds + - `ms` for milliseconds + - `u` or `µ` for microseconds + - `ns` for nanoseconds + + Formats timestamps as [unix (epoch) timestamps](/influxdb/version/reference/glossary/#unix-timestamp) with the specified precision + instead of [RFC3339 timestamps](/influxdb/version/reference/glossary/#rfc3339-timestamp) with nanosecond precision. + enum: + - ns + - u + - µ + - ms + - s + - m + - h + type: string + pretty: + description: | + If true, the JSON response is formatted in a human-readable format. 
+ type: boolean + required: + - q + parameters: + - name: Accept + in: header + schema: + type: string + default: application/json + enum: + - application/json + - application/csv + - text/csv + required: false + description: | + The content type that the client can understand. + + If `text/csv` is specified, the `Content-type` response header is `application/csv` and the response is formatted as CSV. + + Returns an error if the format is invalid or non-UTF8. + responses: + '200': + description: | + Success. The response body contains query results. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryResponse' + application/csv: + schema: + type: string + headers: + Content-Type: + description: | + The content type of the response. + Default is `application/json`. + + If the `Accept` request header is `application/csv` or `text/csv`, the `Content-type` response header is `application/csv` + and the response is formatted as CSV. + schema: + type: string + default: application/json + enum: + - application/json + - application/csv + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Access denied. + '404': + description: Database not found. + '405': + description: Method not allowed. + '422': + description: Unprocessable entity. + security: + - BearerAuthentication: [] + - TokenAuthentication: [] + - BasicAuthentication: [] + - QuerystringAuthentication: [] + tags: + - Query data + x-influxdata-guides: + - title: Use the InfluxDB v1 HTTP query API and InfluxQL to query data + href: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ + /health: + get: + operationId: GetHealth + summary: Health check + description: Checks the status of the service. + responses: + '200': + description: Service is running. + '500': + description: Service is unavailable. 
+ tags: + - Server information + /api/v1/health: + get: + operationId: GetHealthV1 + summary: Health check (v1-compatible) + x-compatibility-version: v1 + description: Checks the status of the service. + responses: + '200': + description: Service is running. + '500': + description: Service is unavailable. + tags: + - Server information + /ping: + get: + operationId: GetPing + tags: + - Server information + summary: Ping the server + description: Returns version information for the server. + responses: + '200': + description: Success. The response body contains server information. + content: + application/json: + schema: + example: + version: 0.1.0 + revision: f3d3d3d + /metrics: + get: + operationId: GetMetrics + summary: Metrics + description: Retrieves Prometheus-compatible server metrics. + responses: + '200': + description: Success. The response body contains Prometheus-compatible server metrics. + tags: + - Server information + /api/v3/configure/database: + get: + operationId: GetConfigureDatabase + summary: List databases + description: Retrieves a list of databases. + parameters: + - $ref: '#/components/parameters/formatRequired' + responses: + '200': + description: Success. The response body contains the list of databases. + content: + application/json: + schema: + $ref: '#/components/schemas/ShowDatabasesResponse' + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Database + post: + operationId: PostConfigureDatabase + summary: Create a database + description: Creates a new database in the system. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateDatabaseRequest' + responses: + '201': + description: Success. Database created. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '409': + description: Database already exists. 
+ tags: + - Database + delete: + operationId: DeleteConfigureDatabase + summary: Delete a database + description: | + Soft deletes a database. + The database is scheduled for deletion and unavailable for querying. + Use the `hard_delete_at` parameter to schedule a hard deletion. + parameters: + - $ref: '#/components/parameters/db' + - name: hard_delete_at + in: query + required: false + schema: + type: string + format: date-time + description: | + Schedule the database for hard deletion at the specified time. + If not provided, the database will be soft deleted. + Use ISO 8601 date-time format (for example, "2025-12-31T23:59:59Z"). + + #### Deleting a database cannot be undone + + Deleting a database is a destructive action. + Once a database is deleted, data stored in that database cannot be recovered. + responses: + '200': + description: Success. Database deleted. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Database + /api/v3/configure/database/retention_period: + delete: + operationId: DeleteDatabaseRetentionPeriod + summary: Remove database retention period + description: | + Removes the retention period from a database, setting it to infinite retention. + Data in the database will not expire based on time. + parameters: + - $ref: '#/components/parameters/db' + responses: + '200': + description: Success. Retention period removed from database. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Database + /api/v3/configure/table: + post: + operationId: PostConfigureTable + summary: Create a table + description: Creates a new table within a database. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateTableRequest' + responses: + '201': + description: Success. The table has been created. + '400': + description: Bad request. 
+ '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Database not found. + tags: + - Table + delete: + operationId: DeleteConfigureTable + summary: Delete a table + description: | + Soft deletes a table. + The table is scheduled for deletion and unavailable for querying. + Use the `hard_delete_at` parameter to schedule a hard deletion. + + #### Deleting a table cannot be undone + + Deleting a table is a destructive action. + Once a table is deleted, data stored in that table cannot be recovered. + parameters: + - $ref: '#/components/parameters/db' + - name: table + in: query + required: true + schema: + type: string + - name: hard_delete_at + in: query + required: false + schema: + type: string + format: date-time + description: | + Schedule the table for hard deletion at the specified time. + If not provided, the table will be soft deleted. + Use ISO 8601 format (for example, "2025-12-31T23:59:59Z"). + responses: + '200': + description: Success (no content). The table has been deleted. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Table not found. + tags: + - Table + /api/v3/configure/distinct_cache: + post: + operationId: PostConfigureDistinctCache + summary: Create distinct cache + description: Creates a distinct cache for a table. + tags: + - Cache data + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DistinctCacheCreateRequest' + responses: + '201': + description: Success. The distinct cache has been created. + '204': + description: Not created. A distinct cache with this configuration already exists. + '400': + description: | + Bad request. + + The server responds with status `400` if the request would overwrite an existing cache with a different configuration. + delete: + operationId: DeleteConfigureDistinctCache + summary: Delete distinct cache + description: Deletes a distinct cache. 
+ parameters: + - $ref: '#/components/parameters/db' + - name: table + in: query + required: true + schema: + type: string + description: The name of the table containing the distinct cache. + - name: name + in: query + required: true + schema: + type: string + description: The name of the distinct cache to delete. + responses: + '200': + description: Success. The distinct cache has been deleted. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Cache not found. + tags: + - Cache data + /api/v3/configure/last_cache: + post: + operationId: PostConfigureLastCache + summary: Create last cache + description: Creates a last cache for a table. + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/LastCacheCreateRequest' + responses: + '201': + description: Success. Last cache created. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Cache not found. + '409': + description: Cache already exists. + tags: + - Cache data + delete: + operationId: DeleteConfigureLastCache + summary: Delete last cache + description: Deletes a last cache. + parameters: + - $ref: '#/components/parameters/db' + - name: table + in: query + required: true + schema: + type: string + description: The name of the table containing the last cache. + - name: name + in: query + required: true + schema: + type: string + description: The name of the last cache to delete. + responses: + '200': + description: Success. The last cache has been deleted. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Cache not found. 
+ tags: + - Cache data + /api/v3/configure/processing_engine_trigger: + post: + operationId: PostConfigureProcessingEngineTrigger + summary: Create processing engine trigger + description: | + Creates a processing engine trigger with the specified plugin file and trigger specification. + + ### Related guides + + - [Processing engine and Python plugins](/influxdb/version/plugins/) + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ProcessingEngineTriggerRequest' + examples: + schedule_cron: + summary: Schedule trigger using cron + description: | + In `"cron:CRON_EXPRESSION"`, `CRON_EXPRESSION` uses extended 6-field cron format. + The cron expression `0 0 6 * * 1-5` means the trigger will run at 6:00 AM every weekday (Monday to Friday). + value: + db: mydb + plugin_filename: schedule.py + trigger_name: schedule_cron_trigger + trigger_specification: cron:0 0 6 * * 1-5 + trigger_settings: + run_async: false + error_behavior: Log + schedule_every: + summary: Schedule trigger using interval + description: | + In `"every:DURATION"`, `DURATION` specifies the interval between trigger executions. + The duration `1h` means the trigger will run every hour. + value: + db: mydb + plugin_filename: schedule.py + trigger_name: schedule_every_trigger + trigger_specification: every:1h + trigger_settings: + run_async: false + error_behavior: Log + schedule_every_seconds: + summary: Schedule trigger using seconds interval + description: | + Example of scheduling a trigger to run every 30 seconds. + value: + db: mydb + plugin_filename: schedule.py + trigger_name: schedule_every_30s_trigger + trigger_specification: every:30s + trigger_settings: + run_async: false + error_behavior: Log + schedule_every_minutes: + summary: Schedule trigger using minutes interval + description: | + Example of scheduling a trigger to run every 5 minutes. 
+ value: + db: mydb + plugin_filename: schedule.py + trigger_name: schedule_every_5m_trigger + trigger_specification: every:5m + trigger_settings: + run_async: false + error_behavior: Log + all_tables: + summary: All tables trigger example + description: | + Trigger that fires on write events to any table in the database. + value: + db: mydb + plugin_filename: all_tables.py + trigger_name: all_tables_trigger + trigger_specification: all_tables + trigger_settings: + run_async: false + error_behavior: Log + table_specific: + summary: Table-specific trigger example + description: | + Trigger that fires on write events to a specific table. + value: + db: mydb + plugin_filename: table.py + trigger_name: table_trigger + trigger_specification: table:sensors + trigger_settings: + run_async: false + error_behavior: Log + api_request: + summary: On-demand request trigger example + description: | + Creates an HTTP endpoint `/api/v3/engine/hello-world` for manual invocation. + value: + db: mydb + plugin_filename: request.py + trigger_name: hello_world_trigger + trigger_specification: request:hello-world + trigger_settings: + run_async: false + error_behavior: Log + cron_friday_afternoon: + summary: Cron trigger for Friday afternoons + description: | + Example of a cron trigger that runs every Friday at 2:30 PM. + value: + db: reports + plugin_filename: weekly_report.py + trigger_name: friday_report_trigger + trigger_specification: cron:0 30 14 * * 5 + trigger_settings: + run_async: false + error_behavior: Log + cron_monthly: + summary: Cron trigger for monthly execution + description: | + Example of a cron trigger that runs on the first day of every month at midnight. + value: + db: monthly_data + plugin_filename: monthly_cleanup.py + trigger_name: monthly_cleanup_trigger + trigger_specification: cron:0 0 0 1 * * + trigger_settings: + run_async: false + error_behavior: Log + responses: + '200': + description: Success. Processing engine trigger created. 
+ '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Trigger not found. + tags: + - Processing engine + delete: + operationId: DeleteConfigureProcessingEngineTrigger + summary: Delete processing engine trigger + description: Deletes a processing engine trigger. + parameters: + - $ref: '#/components/parameters/db' + - name: trigger_name + in: query + required: true + schema: + type: string + - name: force + in: query + required: false + schema: + type: boolean + default: false + responses: + '200': + description: Success. The processing engine trigger has been deleted. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Trigger not found. + tags: + - Processing engine + /api/v3/configure/processing_engine_trigger/disable: + post: + operationId: PostDisableProcessingEngineTrigger + summary: Disable processing engine trigger + description: Disables a processing engine trigger. + parameters: + - $ref: '#/components/parameters/ContentType' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ProcessingEngineTriggerRequest' + responses: + '200': + description: Success. The processing engine trigger has been disabled. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Trigger not found. + tags: + - Processing engine + /api/v3/configure/processing_engine_trigger/enable: + post: + operationId: PostEnableProcessingEngineTrigger + summary: Enable processing engine trigger + description: Enables a processing engine trigger. + parameters: + - $ref: '#/components/parameters/ContentType' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ProcessingEngineTriggerRequest' + responses: + '200': + description: Success. The processing engine trigger is enabled. + '400': + description: Bad request. 
+ '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Trigger not found. + tags: + - Processing engine + /api/v3/configure/plugin_environment/install_packages: + post: + operationId: PostInstallPluginPackages + summary: Install plugin packages + description: | + Installs the specified Python packages into the processing engine plugin environment. + + This endpoint is synchronous and blocks until the packages are installed. + + ### Related guides + + - [Processing engine and Python plugins](/influxdb/version/plugins/) + parameters: + - $ref: '#/components/parameters/ContentType' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + packages: + type: array + items: + type: string + description: | + A list of Python package names to install. + Can include version specifiers (e.g., "scipy==1.9.0"). + example: + - influxdb3-python + - scipy + - pandas==1.5.0 + - requests + required: + - packages + example: + packages: + - influxdb3-python + - scipy + - pandas==1.5.0 + - requests + responses: + '200': + description: Success. The packages are installed. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + tags: + - Processing engine + /api/v3/configure/plugin_environment/install_requirements: + post: + operationId: PostInstallPluginRequirements + summary: Install plugin requirements + description: | + Installs requirements from a requirements file (also known as a "pip requirements file") into the processing engine plugin environment. + + This endpoint is synchronous and blocks until the requirements are installed. 
+ + ### Related + + - [Processing engine and Python plugins](/influxdb/version/plugins/) + - [Python requirements file format](https://pip.pypa.io/en/stable/reference/requirements-file-format/) + parameters: + - $ref: '#/components/parameters/ContentType' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + requirements_location: + type: string + description: | + The path to the requirements file containing Python packages to install. + Can be a relative path (relative to the plugin directory) or an absolute path. + example: requirements.txt + required: + - requirements_location + example: + requirements_location: requirements.txt + responses: + '200': + description: Success. The requirements have been installed. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + tags: + - Processing engine + /api/v3/plugin_test/wal: + post: + operationId: PostTestWALPlugin + summary: Test WAL plugin + description: Executes a test of a write-ahead logging (WAL) plugin. + responses: + '200': + description: Success. The plugin test has been executed. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Plugin not enabled. + tags: + - Processing engine + /api/v3/plugin_test/schedule: + post: + operationId: PostTestSchedulingPlugin + summary: Test scheduling plugin + description: Executes a test of a scheduling plugin. + responses: + '200': + description: Success. The plugin test has been executed. + '400': + description: Bad request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Plugin not enabled. + tags: + - Processing engine + /api/v3/engine/{request_path}: + parameters: + - name: request_path + description: | + The path configured in the request trigger specification for the plugin. 
+ + For example, if you define a trigger with the following: + + ```json + trigger_specification: "request:hello-world" + ``` + + then, the HTTP API exposes the following plugin endpoint: + + ``` + /api/v3/engine/hello-world + ``` + in: path + required: true + schema: + type: string + get: + operationId: GetProcessingEnginePluginRequest + summary: On Request processing engine plugin request + description: | + Executes the On Request processing engine plugin specified in the trigger's `plugin_filename`. + The request can include request headers, query string parameters, and a request body, which InfluxDB passes to the plugin. + + An On Request plugin implements the following signature: + + ```python + def process_request(influxdb3_local, query_parameters, request_headers, request_body, args=None) + ``` + + The response depends on the plugin implementation. + responses: + '200': + description: Success. The plugin request has been executed. + '400': + description: Malformed request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Plugin not found. + '500': + description: Processing failure. + tags: + - Processing engine + post: + operationId: PostProcessingEnginePluginRequest + summary: On Request processing engine plugin request + description: | + Executes the On Request processing engine plugin specified in the trigger's `plugin_filename`. + The request can include request headers, query string parameters, and a request body, which InfluxDB passes to the plugin. + + An On Request plugin implements the following signature: + + ```python + def process_request(influxdb3_local, query_parameters, request_headers, request_body, args=None) + ``` + + The response depends on the plugin implementation. + parameters: + - $ref: '#/components/parameters/ContentType' + requestBody: + required: false + content: + application/json: + schema: + type: object + additionalProperties: true + responses: + '200': + description: Success. 
The plugin request has been executed. + '400': + description: Malformed request. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Plugin not found. + '500': + description: Processing failure. + tags: + - Processing engine + /api/v3/configure/token/admin: + post: + operationId: PostCreateAdminToken + summary: Create admin token + description: | + Creates an admin token. + An admin token is a special type of token that has full access to all resources in the system. + responses: + '201': + description: | + Success. The admin token has been created. + The response body contains the token string and metadata. + content: + application/json: + schema: + $ref: '#/components/schemas/AdminTokenObject' + '401': + $ref: '#/components/responses/Unauthorized' + security: + - BearerAuthentication: [] + - {} # No auth required for initial token creation + tags: + - Auth token + /api/v3/configure/token/admin/regenerate: + post: + operationId: PostRegenerateAdminToken + summary: Regenerate admin token + description: | + Regenerates an admin token and revokes the previous token with the same name. + parameters: [] + responses: + '201': + description: Success. The admin token has been regenerated. + content: + application/json: + schema: + $ref: '#/components/schemas/AdminTokenObject' + '401': + $ref: '#/components/responses/Unauthorized' + tags: + - Authentication + - Auth token + /api/v3/configure/token: + delete: + operationId: DeleteToken + summary: Delete token + description: | + Deletes a token. + parameters: + - name: id + in: query + required: true + schema: + type: string + description: | + The ID of the token to delete. + responses: + '204': + description: Success. The token has been deleted. + '401': + $ref: '#/components/responses/Unauthorized' + '404': + description: Token not found. 
+ tags: + - Authentication + - Auth token + /api/v3/configure/token/named_admin: + post: + operationId: PostCreateNamedAdminToken + summary: Create named admin token + description: | + Creates a named admin token. + A named admin token is an admin token with a specific name identifier. + parameters: + - name: name + in: query + required: true + schema: + type: string + description: | + The name for the admin token. + responses: + '201': + description: | + Success. The named admin token has been created. + The response body contains the token string and metadata. + content: + application/json: + schema: + $ref: '#/components/schemas/AdminTokenObject' + '401': + $ref: '#/components/responses/Unauthorized' + '409': + description: A token with this name already exists. + tags: + - Authentication + - Auth token + /api/v3/plugins/files: + put: + operationId: PutPluginFile + summary: Update plugin file + description: | + Updates a plugin file in the plugin directory. + x-security-note: Requires an admin token + responses: + '204': + description: Success. The plugin file has been updated. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Forbidden. Admin token required. + tags: + - Processing engine + /api/v3/plugins/directory: + put: + operationId: PutPluginDirectory + summary: Update plugin directory + description: | + Updates the plugin directory configuration. + x-security-note: Requires an admin token + responses: + '204': + description: Success. The plugin directory has been updated. + '401': + $ref: '#/components/responses/Unauthorized' + '403': + description: Forbidden. Admin token required. + tags: + - Processing engine +components: + parameters: + AcceptQueryHeader: + name: Accept + in: header + schema: + type: string + default: application/json + enum: + - application/json + - application/jsonl + - application/vnd.apache.parquet + - text/csv + required: false + description: | + The content type that the client can understand. 
+ ContentEncoding: + name: Content-Encoding + in: header + description: | + The compression applied to the line protocol in the request payload. + To send a gzip payload, pass `Content-Encoding: gzip` header. + schema: + $ref: '#/components/schemas/ContentEncoding' + required: false + ContentLength: + name: Content-Length + in: header + description: | + The size of the entity-body, in bytes, sent to InfluxDB. + schema: + $ref: '#/components/schemas/ContentLength' + ContentType: + name: Content-Type + description: | + The format of the data in the request body. + in: header + schema: + type: string + enum: + - application/json + required: false + db: + name: db + in: query + required: true + schema: + type: string + description: | + The name of the database. + dbWriteParam: + name: db + in: query + required: true + schema: + type: string + description: | + The name of the database. + InfluxDB creates the database if it doesn't already exist, and then + writes all points in the batch to the database. + dbQueryParam: + name: db + in: query + required: false + schema: + type: string + description: | + The name of the database. + + If you provide a query that specifies the database, you can omit the 'db' parameter from your request. + accept_partial: + name: accept_partial + in: query + required: false + schema: + $ref: '#/components/schemas/AcceptPartial' + compatibilityPrecisionParam: + name: precision + in: query + required: true + schema: + $ref: '#/components/schemas/PrecisionWriteCompatibility' + description: The precision for unix timestamps in the line protocol batch. + precisionParam: + name: precision + in: query + required: true + schema: + $ref: '#/components/schemas/PrecisionWrite' + description: The precision for unix timestamps in the line protocol batch. + querySqlParam: + name: q + in: query + required: true + schema: + type: string + format: SQL + description: | + The query to execute. 
+ format: + name: format + in: query + required: false + schema: + $ref: '#/components/schemas/Format' + formatRequired: + name: format + in: query + required: true + schema: + $ref: '#/components/schemas/Format' + v1UsernameParam: + name: u + in: query + required: false + schema: + type: string + description: | + Username for v1 compatibility authentication. + When using Basic authentication or query string authentication, InfluxDB 3 ignores this parameter but allows any arbitrary string for compatibility with InfluxDB 1.x clients. + v1PasswordParam: + name: p + in: query + required: false + schema: + type: string + description: | + Password for v1 compatibility authentication. + For query string authentication, pass an admin token. + InfluxDB 3 checks that the `p` value is an authorized token. + requestBodies: + lineProtocolRequestBody: + required: true + content: + text/plain: + schema: + type: string + examples: + line: + summary: Example line protocol + value: measurement,tag=value field=1 1234567890 + multiline: + summary: Example line protocol with UTF-8 characters + value: | + measurement,tag=value field=1 1234567890 + measurement,tag=value field=2 1234567900 + measurement,tag=value field=3 1234568000 + queryRequestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequestObject' + schemas: + AdminTokenObject: + type: object + properties: + id: + type: integer + name: + type: string + token: + type: string + hash: + type: string + created_at: + type: string + format: date-time + expiry: + format: date-time + example: + id: 0 + name: _admin + token: apiv3_00xx0Xx0xx00XX0x0 + hash: 00xx0Xx0xx00XX0x0 + created_at: '2025-04-18T14:02:45.331Z' + expiry: null + ContentEncoding: + type: string + enum: + - gzip + - identity + description: | + Content coding. + Use `gzip` for compressed data or `identity` for unmodified, uncompressed data. 
+ + #### Multi-member gzip support + + InfluxDB 3 supports multi-member gzip payloads (concatenated gzip files per [RFC 1952](https://www.rfc-editor.org/rfc/rfc1952)). + This allows you to: + - Concatenate multiple gzip files and send them in a single request + - Maintain compatibility with InfluxDB v1 and v2 write endpoints + - Simplify batch operations using standard compression tools + default: identity + LineProtocol: + type: string + enum: + - text/plain + - text/plain; charset=utf-8 + description: | + `text/plain` is the content type for line protocol. `UTF-8` is the default character set. + default: text/plain; charset=utf-8 + ContentLength: + type: integer + description: The length in decimal number of octets. + Database: + type: string + AcceptPartial: + type: boolean + default: true + description: Accept partial writes. + Format: + type: string + enum: + - json + - csv + - parquet + - jsonl + description: | + The format of data in the response body. + NoSync: + type: boolean + default: false + description: | + Acknowledges a successful write without waiting for WAL persistence. + + #### Related + + - [Use the HTTP API and client libraries to write data](/influxdb/version/write-data/api-client-libraries/) + - [Data durability](/influxdb/version/reference/internals/durability/) + PrecisionWriteCompatibility: + enum: + - ms + - s + - us + - ns + type: string + description: | + The precision for unix timestamps in the line protocol batch. + Use `ms` for milliseconds, `s` for seconds, `us` for microseconds, or `ns` for nanoseconds. + PrecisionWrite: + enum: + - auto + - nanosecond + - microsecond + - millisecond + - second + type: string + description: | + The precision for unix timestamps in the line protocol batch. 
+ + Supported values: + - `auto` (default): Automatically detects precision based on timestamp magnitude + - `nanosecond`: Nanoseconds + - `microsecond`: Microseconds + - `millisecond`: Milliseconds + - `second`: Seconds + QueryRequestObject: + type: object + properties: + db: + description: | + The name of the database to query. + Required if the query (`q`) doesn't specify the database. + type: string + q: + description: The query to execute. + type: string + format: + description: The format of the query results. + type: string + enum: + - json + - csv + - parquet + - jsonl + - pretty + params: + description: | + Additional parameters for the query. + Use this field to pass query parameters. + type: object + additionalProperties: true + required: + - db + - q + example: + db: mydb + q: SELECT * FROM mytable + format: json + params: {} + CreateDatabaseRequest: + type: object + properties: + db: + type: string + retention_period: + type: string + description: | + The retention period for the database. Specifies how long data should be retained. + Use duration format (for example, "1d", "1h", "30m", "7d"). + example: 7d + required: + - db + CreateTableRequest: + type: object + properties: + db: + type: string + table: + type: string + tags: + type: array + items: + type: string + fields: + type: array + items: + type: object + properties: + name: + type: string + type: + type: string + enum: + - utf8 + - int64 + - uint64 + - float64 + - bool + required: + - name + - type + required: + - db + - table + - tags + DistinctCacheCreateRequest: + type: object + properties: + db: + type: string + table: + type: string + name: + type: string + description: Optional cache name. + columns: + type: array + items: + type: string + max_cardinality: + type: integer + description: Optional maximum cardinality. + max_age: + type: integer + description: Optional maximum age in seconds. 
+ required: + - db + - table + - columns + example: + db: mydb + table: mytable + columns: + - tag1 + - tag2 + max_cardinality: 1000 + max_age: 3600 + LastCacheCreateRequest: + type: object + properties: + db: + type: string + table: + type: string + name: + type: string + description: Optional cache name. + key_columns: + type: array + items: + type: string + description: Optional list of key columns. + value_columns: + type: array + items: + type: string + description: Optional list of value columns. + count: + type: integer + description: Optional count. + ttl: + type: integer + description: Optional time-to-live in seconds. + required: + - db + - table + example: + db: mydb + table: mytable + key_columns: + - tag1 + value_columns: + - field1 + count: 100 + ttl: 3600 + ProcessingEngineTriggerRequest: + type: object + properties: + db: + type: string + plugin_filename: + type: string + description: | + The path and filename of the plugin to execute--for example, + `schedule.py` or `endpoints/report.py`. + The path can be absolute or relative to the `--plugins-dir` directory configured when starting InfluxDB 3. + + The plugin file must implement the trigger interface associated with the trigger's specification. + trigger_name: + type: string + trigger_settings: + description: | + Configuration for trigger error handling and execution behavior. + allOf: + - $ref: '#/components/schemas/TriggerSettings' + trigger_specification: + type: string + description: | + Specifies when and how the processing engine trigger should be invoked. 
+
+          ## Supported trigger specifications:
+
+          ### Cron-based scheduling
+          Format: `cron:CRON_EXPRESSION`
+
+          Uses extended (6-field) cron format (second minute hour day_of_month month day_of_week):
+          ```
+          ┌───────────── second (0-59)
+          │ ┌───────────── minute (0-59)
+          │ │ ┌───────────── hour (0-23)
+          │ │ │ ┌───────────── day of month (1-31)
+          │ │ │ │ ┌───────────── month (1-12)
+          │ │ │ │ │ ┌───────────── day of week (0-6, Sunday=0)
+          │ │ │ │ │ │
+          * * * * * *
+          ```
+          Examples:
+          - `cron:0 0 6 * * 1-5` - Every weekday at 6:00 AM
+          - `cron:0 30 14 * * 5` - Every Friday at 2:30 PM
+          - `cron:0 0 0 1 * *` - First day of every month at midnight
+
+          ### Interval-based scheduling
+          Format: `every:DURATION`
+
+          Supported durations: `s` (seconds), `m` (minutes), `h` (hours), `d` (days), `w` (weeks), `M` (months), `y` (years):
+          - `every:30s` - Every 30 seconds
+          - `every:5m` - Every 5 minutes
+          - `every:1h` - Every hour
+          - `every:1d` - Every day
+          - `every:1w` - Every week
+          - `every:1M` - Every month
+          - `every:1y` - Every year
+
+          **Maximum interval**: 1 year
+
+          ### Table-based triggers
+          - `all_tables` - Triggers on write events to any table in the database
+          - `table:TABLE_NAME` - Triggers on write events to a specific table
+
+          ### On-demand triggers
+          Format: `request:REQUEST_PATH`
+
+          Creates an HTTP endpoint `/api/v3/engine/REQUEST_PATH` for manual invocation:
+          - `request:hello-world` - Creates endpoint `/api/v3/engine/hello-world`
+          - `request:data-export` - Creates endpoint `/api/v3/engine/data-export`
+        pattern: ^(cron:[0-9 *,/-]+|every:[0-9]+[smhdwMy]|all_tables|table:[a-zA-Z_][a-zA-Z0-9_]*|request:[a-zA-Z0-9_-]+)$
+        example: cron:0 0 6 * * 1-5
+      trigger_arguments:
+        type: object
+        additionalProperties: true
+        description: Optional arguments passed to the plugin.
+      disabled:
+        type: boolean
+        default: false
+        description: Whether the trigger is disabled.
+ required: + - db + - plugin_filename + - trigger_name + - trigger_settings + - trigger_specification + TriggerSettings: + type: object + description: | + Configuration settings for processing engine trigger error handling and execution behavior. + properties: + run_async: + type: boolean + default: false + description: | + Whether to run the trigger asynchronously. + When `true`, the trigger executes in the background without blocking. + When `false`, the trigger executes synchronously. + error_behavior: + type: string + enum: + - Log + - Retry + - Disable + description: | + Specifies how to handle errors that occur during trigger execution: + - `Log`: Log the error and continue (default) + - `Retry`: Retry the trigger execution + - `Disable`: Disable the trigger after an error + default: Log + required: + - run_async + - error_behavior + ShowDatabasesResponse: + type: object + properties: + databases: + type: array + items: + type: string + QueryResponse: + type: object + properties: + results: + type: array + items: + type: object + example: + results: + - series: + - name: mytable + columns: + - time + - value + values: + - - '2024-02-02T12:00:00Z' + - 42 + ErrorMessage: + type: object + properties: + error: + type: string + data: + type: object + nullable: true + LineProtocolError: + properties: + code: + description: Code is the machine-readable error code. + enum: + - internal error + - not found + - conflict + - invalid + - empty value + - unavailable + readOnly: true + type: string + err: + description: Stack of errors that occurred during processing of the request. Useful for debugging. + readOnly: true + type: string + line: + description: First line in the request body that contains malformed data. + format: int32 + readOnly: true + type: integer + message: + description: Human-readable message. + readOnly: true + type: string + op: + description: Describes the logical code operation when the error occurred. Useful for debugging. 
+ readOnly: true + type: string + required: + - code + EpochCompatibility: + description: | + A unix timestamp precision. + - `h` for hours + - `m` for minutes + - `s` for seconds + - `ms` for milliseconds + - `u` or `µ` for microseconds + - `ns` for nanoseconds + enum: + - ns + - u + - µ + - ms + - s + - m + - h + type: string + UpdateDatabaseRequest: + type: object + properties: + retention_period: + type: string + description: | + The retention period for the database. Specifies how long data should be retained. + Use duration format (for example, "1d", "1h", "30m", "7d"). + example: 7d + description: Request schema for updating database configuration. + UpdateTableRequest: + type: object + properties: + db: + type: string + description: The name of the database containing the table. + table: + type: string + description: The name of the table to update. + retention_period: + type: string + description: | + The retention period for the table. Specifies how long data in this table should be retained. + Use duration format (for example, "1d", "1h", "30m", "7d"). + example: 30d + required: + - db + - table + description: Request schema for updating table configuration. + LicenseResponse: + type: object + properties: + license_type: + type: string + description: The type of license (for example, "enterprise", "trial"). + example: enterprise + expires_at: + type: string + format: date-time + description: The expiration date of the license in ISO 8601 format. + example: '2025-12-31T23:59:59Z' + features: + type: array + items: + type: string + description: List of features enabled by the license. + example: + - clustering + - processing_engine + - advanced_auth + status: + type: string + enum: + - active + - expired + - invalid + description: The current status of the license. + example: active + description: Response schema for license information. 
+ ResourceTokenObject: + type: object + properties: + token_name: + type: string + permissions: + type: array + items: + type: object + properties: + resource_type: + type: string + enum: + - system + - db + resource_identifier: + type: array + items: + type: string + actions: + type: array + items: + type: string + enum: + - read + - write + expiry_secs: + type: integer + description: The expiration time in seconds. + description: Response schema for resource token creation. + responses: + Unauthorized: + description: Unauthorized access. + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorMessage' + BadRequest: + description: | + Request failed. Possible reasons: + + - Invalid database name + - Malformed request body + - Invalid timestamp precision + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorMessage' + Forbidden: + description: Access denied. + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorMessage' + NotFound: + description: Resource not found. + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorMessage' + headers: + ClusterUUID: + description: | + The catalog UUID of the InfluxDB instance. + This header is included in all HTTP API responses and enables you to: + - Identify which cluster instance handled the request + - Monitor deployments across multiple InfluxDB instances + - Debug and troubleshoot distributed systems + schema: + type: string + format: uuid + example: 01234567-89ab-cdef-0123-456789abcdef + securitySchemes: + BasicAuthentication: + type: http + scheme: basic + description: | + Use the `Authorization` header with the `Basic` scheme to authenticate v1 API requests. + + Works with v1 compatibility [`/write`](#operation/PostV1Write) and [`/query`](#operation/GetV1Query) endpoints in InfluxDB 3. 
+ + When authenticating requests, InfluxDB 3 checks that the `password` part of the decoded credential is an authorized token + and ignores the `username` part of the decoded credential. + + ### Example + + ```bash + curl "http://localhost:8181/write?db=DATABASE_NAME&precision=s" \ + --user "":"AUTH_TOKEN" \ + --header "Content-type: text/plain; charset=utf-8" \ + --data-binary 'home,room=kitchen temp=72 1641024000' + ``` + + Replace the following: + + - **`DATABASE_NAME`**: your InfluxDB 3 database + - **`AUTH_TOKEN`**: an admin token + + #### Related guides + + - [Authenticate v1 API requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) + QuerystringAuthentication: + type: apiKey + in: query + name: u=&p= + description: | + Use InfluxDB 1.x API parameters to provide credentials through the query string for v1 API requests. + + Querystring authentication works with v1-compatible [`/write`](#operation/PostV1Write) and [`/query`](#operation/GetV1Query) endpoints. + + When authenticating requests, InfluxDB 3 checks that the `p` (_password_) query parameter is an authorized token + and ignores the `u` (_username_) query parameter. 
+ + ### Syntax + + ```http + http://localhost:8181/query/?[u=any]&p=DATABASE_TOKEN + http://localhost:8181/write/?[u=any]&p=DATABASE_TOKEN + ``` + + ### Examples + + ```bash + curl "http://localhost:8181/write?db=DATABASE_NAME&precision=s&p=AUTH_TOKEN" \ + --header "Content-type: text/plain; charset=utf-8" \ + --data-binary 'home,room=kitchen temp=72 1641024000' + ``` + + Replace the following: + + - **`DATABASE_NAME`**: your InfluxDB 3 database + - **`AUTH_TOKEN`**: an admin token + + ```bash + ####################################### + # Use an InfluxDB 1.x compatible username and password + # to query the InfluxDB v1 HTTP API + ####################################### + # Use authentication query parameters: + # ?p=DATABASE_TOKEN + ####################################### + + curl --get "http://localhost:8181/query" \ + --data-urlencode "p=AUTH_TOKEN" \ + --data-urlencode "db=DATABASE_NAME" \ + --data-urlencode "q=SELECT * FROM MEASUREMENT" + ``` + + Replace the following: + + - **`DATABASE_NAME`**: the database to query + - **`AUTH_TOKEN`**: an [admin token](/influxdb/version/admin/tokens/) + + #### Related guides + + - [Authenticate v1 API requests](/influxdb/version/guides/api-compatibility/v1/) + - [Manage tokens](/influxdb/version/admin/tokens/) + BearerAuthentication: + type: http + scheme: bearer + bearerFormat: JWT + description: | + + Use the OAuth Bearer authentication + scheme to provide an authorization token to InfluxDB 3. + + Bearer authentication works with all endpoints. + + In your API requests, send an `Authorization` header. + For the header value, provide the word `Bearer` followed by a space and an admin token. + + + ### Syntax + + ```http + Authorization: Bearer AUTH_TOKEN + ``` + + ### Example + + ```bash + curl http://localhost:8181/api/v3/query_influxql \ + --header "Authorization: Bearer AUTH_TOKEN" + ``` + TokenAuthentication: + description: | + Use InfluxDB v2 Token authentication to provide an authorization token to InfluxDB 3. 
+
+        The v2 Token scheme works with v1 and v2 compatibility endpoints in InfluxDB 3.
+
+        In your API requests, send an `Authorization` header.
+        For the header value, provide the word `Token` followed by a space and a database token.
+        The word `Token` is case-sensitive.
+
+        ### Syntax
+
+        ```http
+        Authorization: Token AUTH_TOKEN
+        ```
+
+        ### Example
+
+        ```sh
+        ########################################################
+        # Use the Token authentication scheme with /api/v2/write
+        # to write data.
+        ########################################################
+
+        curl --request POST "http://localhost:8181/api/v2/write?bucket=DATABASE_NAME&precision=s" \
+          --header "Authorization: Token AUTH_TOKEN" \
+          --data-binary 'home,room=kitchen temp=72 1463683075'
+        ```
+
+        ### Related guides
+
+        - [Manage tokens](/influxdb/version/admin/tokens/)
+      in: header
+      name: Authorization
+      type: apiKey
diff --git a/api-docs/scripts/apply-overlay.js b/api-docs/scripts/apply-overlay.js
new file mode 100644
index 0000000000..2f57fa68ea
--- /dev/null
+++ b/api-docs/scripts/apply-overlay.js
@@ -0,0 +1,252 @@
+#!/usr/bin/env node
+/**
+ * Apply OpenAPI Overlay to Base Spec
+ *
+ * Implements a subset of the OpenAPI Overlay Specification v1.0.0
+ * to merge product-specific overlays onto a shared base spec.
+ *
+ * Supported overlay actions:
+ * - target: $.info.title (update info title)
+ * - target: $.info.description (update info description)
+ * - target: $.servers[0].description (update server description)
+ * - target: $.servers[0].variables.*.description (update variable description)
+ * - target: $.paths['/path'].method (add/update operation)
+ * - target: $.paths['/path'] (add entire path)
+ *
+ * Usage:
+ *   node apply-overlay.js <base-spec> <overlay> -o <output>
+ *
+ * @module apply-overlay
+ */
+
+const fs = require('fs');
+const yaml = require('js-yaml');
+const path = require('path');
+
+/**
+ * Parse a JSONPath-like target string
+ * @param {string} target - JSONPath expression (e.g., "$.info.title")
+ * @returns {string[]} - Path segments
+ */
+function parseTarget(target) {
+  // Remove leading $. and split by . or bracket notation
+  const cleaned = target.replace(/^\$\.?/, '');
+  const segments = [];
+  let current = '';
+  let inBracket = false;
+
+  for (let i = 0; i < cleaned.length; i++) {
+    const char = cleaned[i];
+    if (char === '[' && !inBracket) {
+      if (current) segments.push(current);
+      current = '';
+      inBracket = true;
+    } else if (char === ']' && inBracket) {
+      // Remove quotes from bracket content
+      segments.push(current.replace(/^['"]|['"]$/g, ''));
+      current = '';
+      inBracket = false;
+    } else if (char === '.'
&& !inBracket) { + if (current) segments.push(current); + current = ''; + } else { + current += char; + } + } + if (current) segments.push(current); + + return segments; +} + +/** + * Get a value from an object using path segments + * @param {object} obj - Source object + * @param {string[]} segments - Path segments + * @returns {*} - Value at path + */ +function getPath(obj, segments) { + let current = obj; + for (const segment of segments) { + if (current === undefined || current === null) return undefined; + // Handle array index + if (/^\d+$/.test(segment)) { + current = current[parseInt(segment, 10)]; + } else { + current = current[segment]; + } + } + return current; +} + +/** + * Set a value in an object using path segments + * @param {object} obj - Target object + * @param {string[]} segments - Path segments + * @param {*} value - Value to set + */ +function setPath(obj, segments, value) { + let current = obj; + for (let i = 0; i < segments.length - 1; i++) { + const segment = segments[i]; + const nextSegment = segments[i + 1]; + + // Handle array index + if (/^\d+$/.test(segment)) { + const idx = parseInt(segment, 10); + if (current[idx] === undefined) { + current[idx] = /^\d+$/.test(nextSegment) ? [] : {}; + } + current = current[idx]; + } else { + if (current[segment] === undefined) { + current[segment] = /^\d+$/.test(nextSegment) ? 
[] : {}; + } + current = current[segment]; + } + } + + const lastSegment = segments[segments.length - 1]; + if (/^\d+$/.test(lastSegment)) { + current[parseInt(lastSegment, 10)] = value; + } else { + current[lastSegment] = value; + } +} + +/** + * Deep merge two objects + * @param {object} target - Target object + * @param {object} source - Source object to merge + * @returns {object} - Merged object + */ +function deepMerge(target, source) { + if (typeof source !== 'object' || source === null) { + return source; + } + if (typeof target !== 'object' || target === null) { + return source; + } + + const result = { ...target }; + for (const key of Object.keys(source)) { + if ( + typeof source[key] === 'object' && + source[key] !== null && + !Array.isArray(source[key]) + ) { + result[key] = deepMerge(result[key], source[key]); + } else { + result[key] = source[key]; + } + } + return result; +} + +/** + * Apply overlay actions to base spec + * @param {object} base - Base OpenAPI spec + * @param {object} overlay - Overlay spec with actions + * @returns {object} - Merged spec + */ +function applyOverlay(base, overlay) { + const result = JSON.parse(JSON.stringify(base)); // Deep clone + + if (!overlay.actions || !Array.isArray(overlay.actions)) { + console.warn('Warning: No actions found in overlay'); + return result; + } + + for (const action of overlay.actions) { + if (!action.target) { + console.warn('Warning: Action missing target, skipping'); + continue; + } + + const segments = parseTarget(action.target); + + if (action.update !== undefined) { + // Get existing value at path + const existing = getPath(result, segments); + + if ( + existing !== undefined && + typeof existing === 'object' && + typeof action.update === 'object' + ) { + // Merge objects + setPath(result, segments, deepMerge(existing, action.update)); + } else { + // Replace value + setPath(result, segments, action.update); + } + + console.log(`Applied: ${action.target}`); + } else if (action.remove === 
true) { + // Remove is not implemented yet + console.warn( + `Warning: remove action not implemented for ${action.target}` + ); + } + } + + return result; +} + +/** + * Main function + */ +function main() { + const args = process.argv.slice(2); + + // Parse arguments + let baseFile = null; + let overlayFile = null; + let outputFile = null; + + for (let i = 0; i < args.length; i++) { + if (args[i] === '-o' || args[i] === '--output') { + outputFile = args[++i]; + } else if (!baseFile) { + baseFile = args[i]; + } else if (!overlayFile) { + overlayFile = args[i]; + } + } + + if (!baseFile || !overlayFile) { + console.error( + 'Usage: node apply-overlay.js -o ' + ); + process.exit(1); + } + + // Read files + console.log(`Base: ${baseFile}`); + console.log(`Overlay: ${overlayFile}`); + + const baseContent = fs.readFileSync(baseFile, 'utf8'); + const overlayContent = fs.readFileSync(overlayFile, 'utf8'); + + const base = yaml.load(baseContent); + const overlay = yaml.load(overlayContent); + + // Apply overlay + const result = applyOverlay(base, overlay); + + // Output + const outputYaml = yaml.dump(result, { + lineWidth: -1, // Don't wrap lines + noRefs: true, // Don't use YAML references + quotingType: "'", + forceQuotes: false, + }); + + if (outputFile) { + fs.writeFileSync(outputFile, outputYaml); + console.log(`Output: ${outputFile}`); + } else { + console.log(outputYaml); + } +} + +main(); diff --git a/data/article_data/influxdb/influxdb3_core/articles.json b/data/article_data/influxdb/influxdb3_core/articles.json index 4f9decca76..c010a18cf5 100644 --- a/data/article_data/influxdb/influxdb3_core/articles.json +++ b/data/article_data/influxdb/influxdb3_core/articles.json @@ -268,7 +268,7 @@ "/api/v3/plugins/directory" ], "title": "Processing engine", - "description": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically 
load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/core/processing-engine/) guide.\n", + "description": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/core/processing-engine/) guide.\n", "tag": "Processing engine", "isConceptual": false, "menuGroup": "Processing Engine", @@ -382,7 +382,7 @@ ] } ], - "tagDescription": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/core/processing-engine/) guide.\n", + "tagDescription": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python 
plugins](/influxdb3/core/processing-engine/) guide.\n", "source": "static/openapi/influxdb3-core/tags/tags/influxdb3-core-processing-engine.yaml", "staticFilePath": "/openapi/influxdb3-core/tags/tags/influxdb3-core-processing-engine.yaml" } @@ -449,7 +449,7 @@ "compatVersion": "v1", "externalDocs": { "description": "Use the InfluxDB v1 HTTP query API and InfluxQL to query data", - "url": "/influxdb3/core/query-data/execute-queries/influxdb-v1-api/" + "url": "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" } }, { @@ -463,13 +463,13 @@ "compatVersion": "v1", "externalDocs": { "description": "Use the InfluxDB v1 HTTP query API and InfluxQL to query data", - "url": "/influxdb3/core/query-data/execute-queries/influxdb-v1-api/" + "url": "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" } } ], "tagDescription": "Query data using SQL or InfluxQL", "related": [ - "/influxdb3/core/query-data/execute-queries/influxdb-v1-api/" + "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" ], "source": "static/openapi/influxdb3-core/tags/tags/influxdb3-core-query-data.yaml", "staticFilePath": "/openapi/influxdb3-core/tags/tags/influxdb3-core-query-data.yaml" @@ -481,12 +481,12 @@ "name": "Quick start", "describes": [], "title": "Quick start", - "description": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. 
[Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3 Core, see the [Get started](/influxdb3/core/get-started/) guide.\n", + "description": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. 
[Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3, see the [Get started](/influxdb3/core/get-started/) guide.\n", "tag": "Quick start", "isConceptual": true, "menuGroup": "Concepts", "operations": [], - "tagDescription": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3 Core, see the [Get started](/influxdb3/core/get-started/) guide.\n", + "tagDescription": "1. 
[Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3, see the [Get started](/influxdb3/core/get-started/) guide.\n", "source": "static/openapi/influxdb3-core/tags/tags/influxdb3-core-quick-start.yaml", "staticFilePath": "/openapi/influxdb3-core/tags/tags/influxdb3-core-quick-start.yaml" } @@ -613,7 +613,7 @@ "compatVersion": "v1", "externalDocs": { "description": "Use compatibility APIs to write data", - "url": "/influxdb3/core/write-data/http-api/compatibility-apis/" + "url": "/influxdb/version/write-data/http-api/compatibility-apis/" } }, { @@ -627,7 +627,7 @@ "compatVersion": "v2", "externalDocs": { "description": "Use compatibility APIs to write data", - "url": "/influxdb3/core/write-data/http-api/compatibility-apis/" + "url": "/influxdb/version/write-data/http-api/compatibility-apis/" } }, { @@ -642,7 +642,7 @@ ], "tagDescription": "Write 
data to InfluxDB 3 using line protocol format.\n\n#### Timestamp precision across write APIs\n\nInfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions.\nThe following table compares timestamp precision support across v1, v2, and v3 write APIs:\n\n| Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) |\n|-----------|---------------|----------------------|-------------------------|\n| **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) |\n| **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` |\n| **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` |\n| **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` |\n| **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` |\n| **Minutes** | ✅ `m` | ❌ No | ❌ No |\n| **Hours** | ✅ `h` | ❌ No | ❌ No |\n| **Default** | Nanosecond | Nanosecond | **Auto** (guessed) |\n\nAll timestamps are stored internally as nanoseconds.\n", "related": [ - "/influxdb3/core/write-data/http-api/compatibility-apis/" + "/influxdb/version/write-data/http-api/compatibility-apis/" ], "source": "static/openapi/influxdb3-core/tags/tags/influxdb3-core-write-data.yaml", "staticFilePath": "/openapi/influxdb3-core/tags/tags/influxdb3-core-write-data.yaml" diff --git a/data/article_data/influxdb/influxdb3_core/articles.yml b/data/article_data/influxdb/influxdb3_core/articles.yml index a1ac5b1a7f..221e298d4d 100644 --- a/data/article_data/influxdb/influxdb3_core/articles.yml +++ b/data/article_data/influxdb/influxdb3_core/articles.yml @@ -542,9 +542,9 @@ articles: trigger On Request plugins. - InfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded - Python VM that can dynamically load and trigger Python plugins in - response to events in your database. + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python + VM that can dynamically load and trigger Python plugins in response to + events in your database. 
Use Processing engine plugins and triggers to run code and perform tasks for different database events. @@ -633,9 +633,9 @@ articles: trigger On Request plugins. - InfluxDB 3 Core provides the InfluxDB 3 processing engine, an embedded - Python VM that can dynamically load and trigger Python plugins in - response to events in your database. + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python + VM that can dynamically load and trigger Python plugins in response to + events in your database. Use Processing engine plugins and triggers to run code and perform tasks for different database events. @@ -692,7 +692,7 @@ articles: compatVersion: v1 externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ - operationId: PostExecuteV1Query method: POST path: /query @@ -702,10 +702,10 @@ articles: compatVersion: v1 externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ tagDescription: Query data using SQL or InfluxQL related: - - /influxdb3/core/query-data/execute-queries/influxdb-v1-api/ + - /influxdb/version/query-data/execute-queries/influxdb-v1-api/ source: static/openapi/influxdb3-core/tags/tags/influxdb3-core-query-data.yaml staticFilePath: /openapi/influxdb3-core/tags/tags/influxdb3-core-query-data.yaml - path: api/quick-start @@ -755,7 +755,7 @@ articles: {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Core, see the [Get + For more information about using InfluxDB 3, see the [Get started](/influxdb3/core/get-started/) guide. 
tag: Quick start isConceptual: true @@ -803,7 +803,7 @@ articles: {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Core, see the [Get + For more information about using InfluxDB 3, see the [Get started](/influxdb3/core/get-started/) guide. source: static/openapi/influxdb3-core/tags/tags/influxdb3-core-quick-start.yaml staticFilePath: /openapi/influxdb3-core/tags/tags/influxdb3-core-quick-start.yaml @@ -934,7 +934,7 @@ articles: compatVersion: v1 externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/core/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ - operationId: PostV2Write method: POST path: /api/v2/write @@ -944,7 +944,7 @@ articles: compatVersion: v2 externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/core/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ - operationId: PostWriteLP method: POST path: /api/v3/write_lp @@ -989,6 +989,6 @@ articles: All timestamps are stored internally as nanoseconds. 
related: - - /influxdb3/core/write-data/http-api/compatibility-apis/ + - /influxdb/version/write-data/http-api/compatibility-apis/ source: static/openapi/influxdb3-core/tags/tags/influxdb3-core-write-data.yaml staticFilePath: /openapi/influxdb3-core/tags/tags/influxdb3-core-write-data.yaml diff --git a/data/article_data/influxdb/influxdb3_enterprise/articles.json b/data/article_data/influxdb/influxdb3_enterprise/articles.json index 7ecdd9e12c..438e358f0c 100644 --- a/data/article_data/influxdb/influxdb3_enterprise/articles.json +++ b/data/article_data/influxdb/influxdb3_enterprise/articles.json @@ -5,11 +5,11 @@ "fields": { "name": "Auth token", "describes": [ - "/api/v3/configure/enterprise/token", "/api/v3/configure/token/admin", "/api/v3/configure/token/admin/regenerate", "/api/v3/configure/token", - "/api/v3/configure/token/named_admin" + "/api/v3/configure/token/named_admin", + "/api/v3/configure/enterprise/token" ], "title": "Auth token", "description": "Manage tokens for authentication and authorization", @@ -17,15 +17,6 @@ "isConceptual": false, "menuGroup": "Other", "operations": [ - { - "operationId": "PostCreateResourceToken", - "method": "POST", - "path": "/api/v3/configure/enterprise/token", - "summary": "Create a resource token", - "tags": [ - "Auth token" - ] - }, { "operationId": "PostCreateAdminToken", "method": "POST", @@ -61,6 +52,15 @@ "tags": [ "Auth token" ] + }, + { + "operationId": "PostCreateResourceToken", + "method": "POST", + "path": "/api/v3/configure/enterprise/token", + "summary": "Create a resource token", + "tags": [ + "Auth token" + ] } ], "tagDescription": "Manage tokens for authentication and authorization", @@ -73,26 +73,17 @@ "fields": { "name": "Authentication", "describes": [ - "/api/v3/configure/enterprise/token", "/api/v3/configure/token/admin/regenerate", "/api/v3/configure/token", - "/api/v3/configure/token/named_admin" + "/api/v3/configure/token/named_admin", + "/api/v3/configure/enterprise/token" ], "title": 
"Authentication", - "description": "Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API:\n| Authentication scheme | Works with |\n|:----------------------|:-----------|\n| Bearer authentication | All endpoints |\n| Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) |\n| Basic authentication | v1 compatibility endpoints (`/write`, `/query`) |\n| Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) |\nSee the **Security Schemes** section below for details on each authentication method.\n", + "description": "Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API:\n\n| Authentication scheme | Works with |\n|:----------------------|:-----------|\n| Bearer authentication | All endpoints |\n| Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) |\n| Basic authentication | v1 compatibility endpoints (`/write`, `/query`) |\n| Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) |\n\nSee the **Security Schemes** section below for details on each authentication method.\n", "tag": "Authentication", "isConceptual": true, "menuGroup": "Concepts", "operations": [ - { - "operationId": "PostCreateResourceToken", - "method": "POST", - "path": "/api/v3/configure/enterprise/token", - "summary": "Create a resource token", - "tags": [ - "Authentication" - ] - }, { "operationId": "PostRegenerateAdminToken", "method": "POST", @@ -119,9 +110,18 @@ "tags": [ "Authentication" ] + }, + { + "operationId": "PostCreateResourceToken", + "method": "POST", + "path": "/api/v3/configure/enterprise/token", + "summary": "Create a resource token", + "tags": [ + "Authentication" + ] } ], - "tagDescription": "Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API:\n| Authentication scheme | Works with 
|\n|:----------------------|:-----------|\n| Bearer authentication | All endpoints |\n| Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) |\n| Basic authentication | v1 compatibility endpoints (`/write`, `/query`) |\n| Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) |\nSee the **Security Schemes** section below for details on each authentication method.\n", + "tagDescription": "Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API:\n\n| Authentication scheme | Works with |\n|:----------------------|:-----------|\n| Bearer authentication | All endpoints |\n| Token authentication | v1 and v2 compatibility endpoints (`/write`, `/query`, `/api/v2/write`) |\n| Basic authentication | v1 compatibility endpoints (`/write`, `/query`) |\n| Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) |\n\nSee the **Security Schemes** section below for details on each authentication method.\n", "showSecuritySchemes": true, "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-authentication.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-authentication.yaml" @@ -136,7 +136,7 @@ "/api/v3/configure/last_cache" ], "title": "Cache data", - "description": "Manage the in-memory cache.\n#### Distinct Value Cache\nThe Distinct Value Cache (DVC) lets you cache distinct\nvalues of one or more columns in a table, improving the performance of\nqueries that return distinct tag and field values. \nThe DVC is an in-memory cache that stores distinct values for specific columns\nin a table. When you create an DVC, you can specify what columns' distinct\nvalues to cache, the maximum number of distinct value combinations to cache, and\nthe maximum age of cached values. 
A DVC is associated with a table, which can\nhave multiple DVCs.\n#### Last value cache\nThe Last Value Cache (LVC) lets you cache the most recent\nvalues for specific fields in a table, improving the performance of queries that\nreturn the most recent value of a field for specific series or the last N values\nof a field.\nThe LVC is an in-memory cache that stores the last N number of values for\nspecific fields of series in a table. When you create an LVC, you can specify\nwhat fields to cache, what tags to use to identify each series, and the\nnumber of values to cache for each unique series.\nAn LVC is associated with a table, which can have multiple LVCs.\n#### Related guides\n- [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/)\n- [Manage the Last Value Cache](/influxdb3/enterprise/admin/last-value-cache/)\n", + "description": "Manage the in-memory cache.\n\n#### Distinct Value Cache\n\nThe Distinct Value Cache (DVC) lets you cache distinct\nvalues of one or more columns in a table, improving the performance of\nqueries that return distinct tag and field values. \n\nThe DVC is an in-memory cache that stores distinct values for specific columns\nin a table. When you create an DVC, you can specify what columns' distinct\nvalues to cache, the maximum number of distinct value combinations to cache, and\nthe maximum age of cached values. A DVC is associated with a table, which can\nhave multiple DVCs.\n\n#### Last value cache\n\nThe Last Value Cache (LVC) lets you cache the most recent\nvalues for specific fields in a table, improving the performance of queries that\nreturn the most recent value of a field for specific series or the last N values\nof a field.\n\nThe LVC is an in-memory cache that stores the last N number of values for\nspecific fields of series in a table. 
When you create an LVC, you can specify\nwhat fields to cache, what tags to use to identify each series, and the\nnumber of values to cache for each unique series.\nAn LVC is associated with a table, which can have multiple LVCs.\n\n#### Related guides\n\n- [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/)\n- [Manage the Last Value Cache](/influxdb3/enterprise/admin/last-value-cache/)\n", "tag": "Cache data", "isConceptual": false, "menuGroup": "Data Operations", @@ -178,7 +178,7 @@ ] } ], - "tagDescription": "Manage the in-memory cache.\n#### Distinct Value Cache\nThe Distinct Value Cache (DVC) lets you cache distinct\nvalues of one or more columns in a table, improving the performance of\nqueries that return distinct tag and field values. \nThe DVC is an in-memory cache that stores distinct values for specific columns\nin a table. When you create an DVC, you can specify what columns' distinct\nvalues to cache, the maximum number of distinct value combinations to cache, and\nthe maximum age of cached values. A DVC is associated with a table, which can\nhave multiple DVCs.\n#### Last value cache\nThe Last Value Cache (LVC) lets you cache the most recent\nvalues for specific fields in a table, improving the performance of queries that\nreturn the most recent value of a field for specific series or the last N values\nof a field.\nThe LVC is an in-memory cache that stores the last N number of values for\nspecific fields of series in a table. 
When you create an LVC, you can specify\nwhat fields to cache, what tags to use to identify each series, and the\nnumber of values to cache for each unique series.\nAn LVC is associated with a table, which can have multiple LVCs.\n#### Related guides\n- [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/)\n- [Manage the Last Value Cache](/influxdb3/enterprise/admin/last-value-cache/)\n", + "tagDescription": "Manage the in-memory cache.\n\n#### Distinct Value Cache\n\nThe Distinct Value Cache (DVC) lets you cache distinct\nvalues of one or more columns in a table, improving the performance of\nqueries that return distinct tag and field values. \n\nThe DVC is an in-memory cache that stores distinct values for specific columns\nin a table. When you create an DVC, you can specify what columns' distinct\nvalues to cache, the maximum number of distinct value combinations to cache, and\nthe maximum age of cached values. A DVC is associated with a table, which can\nhave multiple DVCs.\n\n#### Last value cache\n\nThe Last Value Cache (LVC) lets you cache the most recent\nvalues for specific fields in a table, improving the performance of queries that\nreturn the most recent value of a field for specific series or the last N values\nof a field.\n\nThe LVC is an in-memory cache that stores the last N number of values for\nspecific fields of series in a table. 
When you create an LVC, you can specify\nwhat fields to cache, what tags to use to identify each series, and the\nnumber of values to cache for each unique series.\nAn LVC is associated with a table, which can have multiple LVCs.\n\n#### Related guides\n\n- [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/)\n- [Manage the Last Value Cache](/influxdb3/enterprise/admin/last-value-cache/)\n", "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-cache-data.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-cache-data.yaml" } @@ -255,12 +255,12 @@ "name": "Headers and parameters", "describes": [], "title": "Headers and parameters", - "description": "Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use.\n### Common parameters\nThe following table shows common parameters used by many InfluxDB API endpoints.\nMany endpoints may require other parameters in the query string or in the\nrequest body that perform functions specific to those endpoints.\n| Query parameter | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `db` | string | The database name |\nInfluxDB HTTP API endpoints use standard HTTP request and response headers.\nThe following table shows common headers used by many InfluxDB API endpoints.\nSome endpoints may use other headers that perform functions more specific to those endpoints--for example,\nthe write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body.\n| Header | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `Accept` | string | The content type that the client can understand. 
|\n| `Authorization` | string | The [authorization scheme and credential](/influxdb3/enterprise/api/authentication/). |\n| `Content-Length` | integer | The size of the entity-body, in bytes. |\n| `Content-Type` | string | The format of the data in the request body. |\n", + "description": "Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use.\n\n### Common parameters\n\nThe following table shows common parameters used by many InfluxDB API endpoints.\nMany endpoints may require other parameters in the query string or in the\nrequest body that perform functions specific to those endpoints.\n\n| Query parameter | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `db` | string | The database name |\n\nInfluxDB HTTP API endpoints use standard HTTP request and response headers.\nThe following table shows common headers used by many InfluxDB API endpoints.\nSome endpoints may use other headers that perform functions more specific to those endpoints--for example,\nthe write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body.\n\n| Header | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `Accept` | string | The content type that the client can understand. |\n| `Authorization` | string | The [authorization scheme and credential](/influxdb3/enterprise/api/authentication/). |\n| `Content-Length` | integer | The size of the entity-body, in bytes. |\n| `Content-Type` | string | The format of the data in the request body. 
|\n", "tag": "Headers and parameters", "isConceptual": true, "menuGroup": "Concepts", "operations": [], - "tagDescription": "Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use.\n### Common parameters\nThe following table shows common parameters used by many InfluxDB API endpoints.\nMany endpoints may require other parameters in the query string or in the\nrequest body that perform functions specific to those endpoints.\n| Query parameter | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `db` | string | The database name |\nInfluxDB HTTP API endpoints use standard HTTP request and response headers.\nThe following table shows common headers used by many InfluxDB API endpoints.\nSome endpoints may use other headers that perform functions more specific to those endpoints--for example,\nthe write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body.\n| Header | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `Accept` | string | The content type that the client can understand. |\n| `Authorization` | string | The [authorization scheme and credential](/influxdb3/enterprise/api/authentication/). |\n| `Content-Length` | integer | The size of the entity-body, in bytes. |\n| `Content-Type` | string | The format of the data in the request body. 
|\n", + "tagDescription": "Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use.\n\n### Common parameters\n\nThe following table shows common parameters used by many InfluxDB API endpoints.\nMany endpoints may require other parameters in the query string or in the\nrequest body that perform functions specific to those endpoints.\n\n| Query parameter | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `db` | string | The database name |\n\nInfluxDB HTTP API endpoints use standard HTTP request and response headers.\nThe following table shows common headers used by many InfluxDB API endpoints.\nSome endpoints may use other headers that perform functions more specific to those endpoints--for example,\nthe write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body.\n\n| Header | Value type | Description |\n|:------------------------ |:--------------------- |:-------------------------------------------|\n| `Accept` | string | The content type that the client can understand. |\n| `Authorization` | string | The [authorization scheme and credential](/influxdb3/enterprise/api/authentication/). |\n| `Content-Length` | integer | The size of the entity-body, in bytes. |\n| `Content-Type` | string | The format of the data in the request body. 
|\n", "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-headers-and-parameters.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-headers-and-parameters.yaml" } @@ -271,12 +271,12 @@ "name": "Migrate from InfluxDB v1 or v2", "describes": [], "title": "Migrate from InfluxDB v1 or v2", - "description": "Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3.\nInfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools.\nOperations marked with v1 or v2 badges are compatible with the respective InfluxDB version.\n### Migration guides\n- [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x\n- [Migrate from InfluxDB v2](/influxdb3/enterprise/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud\n- [Use compatibility APIs to write data](/influxdb3/enterprise/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints\n- [Use the v1 HTTP query API](/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP\n", + "description": "Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3.\n\nInfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools.\nOperations marked with v1 or v2 badges are compatible with the respective InfluxDB version.\n\n### Migration guides\n\n- [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x\n- [Migrate from InfluxDB v2](/influxdb3/enterprise/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud\n- [Use compatibility APIs to write data](/influxdb3/enterprise/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints\n- [Use the v1 HTTP query API](/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP\n", 
"tag": "Migrate from InfluxDB v1 or v2", "isConceptual": true, "menuGroup": "Other", "operations": [], - "tagDescription": "Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3.\nInfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools.\nOperations marked with v1 or v2 badges are compatible with the respective InfluxDB version.\n### Migration guides\n- [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x\n- [Migrate from InfluxDB v2](/influxdb3/enterprise/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud\n- [Use compatibility APIs to write data](/influxdb3/enterprise/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints\n- [Use the v1 HTTP query API](/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP\n", + "tagDescription": "Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3.\n\nInfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools.\nOperations marked with v1 or v2 badges are compatible with the respective InfluxDB version.\n\n### Migration guides\n\n- [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x\n- [Migrate from InfluxDB v2](/influxdb3/enterprise/guides/migrate/influxdb-2x/) - For users migrating from InfluxDB 2.x or Cloud\n- [Use compatibility APIs to write data](/influxdb3/enterprise/write-data/http-api/compatibility-apis/) - v1 and v2 write endpoints\n- [Use the v1 HTTP query API](/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/) - InfluxQL queries via HTTP\n", "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-migrate-from-influxdb-v1-or-v2.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-migrate-from-influxdb-v1-or-v2.yaml" } @@ 
-298,7 +298,7 @@ "/api/v3/plugins/directory" ], "title": "Processing engine", - "description": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\nInfluxDB 3 Enterprise provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide.\n", + "description": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide.\n", "tag": "Processing engine", "isConceptual": false, "menuGroup": "Processing Engine", @@ -412,7 +412,7 @@ ] } ], - "tagDescription": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\nInfluxDB 3 Enterprise provides the InfluxDB 3 processing engine, an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide.\n", + "tagDescription": "Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins.\n\nInfluxDB 3 provides the InfluxDB 3 processing engine, 
an embedded Python VM that can dynamically load and trigger Python plugins in response to events in your database.\nUse Processing engine plugins and triggers to run code and perform tasks for different database events.\n\nTo get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide.\n", "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-processing-engine.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-processing-engine.yaml" } @@ -479,7 +479,7 @@ "compatVersion": "v1", "externalDocs": { "description": "Use the InfluxDB v1 HTTP query API and InfluxQL to query data", - "url": "/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/" + "url": "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" } }, { @@ -493,13 +493,13 @@ "compatVersion": "v1", "externalDocs": { "description": "Use the InfluxDB v1 HTTP query API and InfluxQL to query data", - "url": "/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/" + "url": "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" } } ], "tagDescription": "Query data using SQL or InfluxQL", "related": [ - "/influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/" + "/influxdb/version/query-data/execute-queries/influxdb-v1-api/" ], "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-query-data.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-query-data.yaml" @@ -511,12 +511,12 @@ "name": "Quick start", "describes": [], "title": "Quick start", - "description": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. 
[Check the status](#section/Server-information) of the InfluxDB server.\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n If all data is written, the response is `204 No Content`.\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3 Enterprise, see the [Get started](/influxdb3/enterprise/get-started/) guide.\n", + "description": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. 
[Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3, see the [Get started](/influxdb3/enterprise/get-started/) guide.\n", "tag": "Quick start", "isConceptual": true, "menuGroup": "Concepts", "operations": [], - "tagDescription": "1. [Create an admin token](#section/Authentication) to authorize API requests.\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n If all data is written, the response is `204 No Content`.\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3 Enterprise, see the [Get started](/influxdb3/enterprise/get-started/) guide.\n", + "tagDescription": "1. 
[Create an admin token](#section/Authentication) to authorize API requests.\n\n ```bash\n curl -X POST \"http://localhost:8181/api/v3/configure/token/admin\"\n ```\n2. [Check the status](#section/Server-information) of the InfluxDB server.\n\n ```bash\n curl \"http://localhost:8181/health\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\"\n ```\n\n3. [Write data](#operation/PostWriteLP) to InfluxDB.\n\n ```bash\n curl \"http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto\"\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-raw \"home,room=Kitchen temp=72.0\n home,room=Living\\ room temp=71.5\"\n ```\n\n If all data is written, the response is `204 No Content`.\n\n4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB.\n\n ```bash\n curl -G \"http://localhost:8181/api/v3/query_sql\" \\\n --header \"Authorization: Bearer ADMIN_TOKEN\" \\\n --data-urlencode \"db=sensors\" \\\n --data-urlencode \"q=SELECT * FROM home WHERE room='Living room'\" \\\n --data-urlencode \"format=jsonl\"\n ```\n \n Output:\n\n ```jsonl\n {\"room\":\"Living room\",\"temp\":71.5,\"time\":\"2025-02-25T20:19:34.984098\"}\n ```\n \nFor more information about using InfluxDB 3, see the [Get started](/influxdb3/enterprise/get-started/) guide.\n", "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-quick-start.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-quick-start.yaml" } @@ -646,7 +646,7 @@ "/api/v3/write_lp" ], "title": "Write data", - "description": "Write data to InfluxDB 3 using line protocol format.\n#### Timestamp precision across write APIs\nInfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions.\nThe following table compares timestamp precision support across v1, v2, and v3 write APIs:\n| Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) |\n|-----------|---------------|----------------------|-------------------------|\n| **Auto 
detection** | ❌ No | ❌ No | ✅ `auto` (default) |\n| **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` |\n| **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` |\n| **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` |\n| **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` |\n| **Minutes** | ✅ `m` | ❌ No | ❌ No |\n| **Hours** | ✅ `h` | ❌ No | ❌ No |\n| **Default** | Nanosecond | Nanosecond | **Auto** (guessed) |\nAll timestamps are stored internally as nanoseconds.\n", + "description": "Write data to InfluxDB 3 using line protocol format.\n\n#### Timestamp precision across write APIs\n\nInfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions.\nThe following table compares timestamp precision support across v1, v2, and v3 write APIs:\n\n| Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) |\n|-----------|---------------|----------------------|-------------------------|\n| **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) |\n| **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` |\n| **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` |\n| **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` |\n| **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` |\n| **Minutes** | ✅ `m` | ❌ No | ❌ No |\n| **Hours** | ✅ `h` | ❌ No | ❌ No |\n| **Default** | Nanosecond | Nanosecond | **Auto** (guessed) |\n\nAll timestamps are stored internally as nanoseconds.\n", "tag": "Write data", "isConceptual": false, "menuGroup": "Data Operations", @@ -662,7 +662,7 @@ "compatVersion": "v1", "externalDocs": { "description": "Use compatibility APIs to write data", - "url": "/influxdb3/enterprise/write-data/http-api/compatibility-apis/" + "url": "/influxdb/version/write-data/http-api/compatibility-apis/" } }, { @@ -676,7 +676,7 @@ "compatVersion": "v2", "externalDocs": { "description": "Use compatibility APIs to write data", - "url": "/influxdb3/enterprise/write-data/http-api/compatibility-apis/" + "url": 
"/influxdb/version/write-data/http-api/compatibility-apis/" } }, { @@ -689,9 +689,9 @@ ] } ], - "tagDescription": "Write data to InfluxDB 3 using line protocol format.\n#### Timestamp precision across write APIs\nInfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions.\nThe following table compares timestamp precision support across v1, v2, and v3 write APIs:\n| Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) |\n|-----------|---------------|----------------------|-------------------------|\n| **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) |\n| **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` |\n| **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` |\n| **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` |\n| **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` |\n| **Minutes** | ✅ `m` | ❌ No | ❌ No |\n| **Hours** | ✅ `h` | ❌ No | ❌ No |\n| **Default** | Nanosecond | Nanosecond | **Auto** (guessed) |\nAll timestamps are stored internally as nanoseconds.\n", + "tagDescription": "Write data to InfluxDB 3 using line protocol format.\n\n#### Timestamp precision across write APIs\n\nInfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions.\nThe following table compares timestamp precision support across v1, v2, and v3 write APIs:\n\n| Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) |\n|-----------|---------------|----------------------|-------------------------|\n| **Auto detection** | ❌ No | ❌ No | ✅ `auto` (default) |\n| **Seconds** | ✅ `s` | ✅ `s` | ✅ `second` |\n| **Milliseconds** | ✅ `ms` | ✅ `ms` | ✅ `millisecond` |\n| **Microseconds** | ✅ `u` or `µ` | ✅ `us` | ✅ `microsecond` |\n| **Nanoseconds** | ✅ `ns` | ✅ `ns` | ✅ `nanosecond` |\n| **Minutes** | ✅ `m` | ❌ No | ❌ No |\n| **Hours** | ✅ `h` | ❌ No | ❌ No |\n| **Default** | Nanosecond | Nanosecond | **Auto** (guessed) |\n\nAll timestamps are stored internally as nanoseconds.\n", 
"related": [ - "/influxdb3/enterprise/write-data/http-api/compatibility-apis/" + "/influxdb/version/write-data/http-api/compatibility-apis/" ], "source": "static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-write-data.yaml", "staticFilePath": "/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-write-data.yaml" diff --git a/data/article_data/influxdb/influxdb3_enterprise/articles.yml b/data/article_data/influxdb/influxdb3_enterprise/articles.yml index c1f4f3d8eb..d364089748 100644 --- a/data/article_data/influxdb/influxdb3_enterprise/articles.yml +++ b/data/article_data/influxdb/influxdb3_enterprise/articles.yml @@ -3,23 +3,17 @@ articles: fields: name: Auth token describes: - - /api/v3/configure/enterprise/token - /api/v3/configure/token/admin - /api/v3/configure/token/admin/regenerate - /api/v3/configure/token - /api/v3/configure/token/named_admin + - /api/v3/configure/enterprise/token title: Auth token description: Manage tokens for authentication and authorization tag: Auth token isConceptual: false menuGroup: Other operations: - - operationId: PostCreateResourceToken - method: POST - path: /api/v3/configure/enterprise/token - summary: Create a resource token - tags: - - Auth token - operationId: PostCreateAdminToken method: POST path: /api/v3/configure/token/admin @@ -44,6 +38,12 @@ articles: summary: Create named admin token tags: - Auth token + - operationId: PostCreateResourceToken + method: POST + path: /api/v3/configure/enterprise/token + summary: Create a resource token + tags: + - Auth token tagDescription: Manage tokens for authentication and authorization source: >- static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-auth-token.yaml @@ -53,15 +53,16 @@ articles: fields: name: Authentication describes: - - /api/v3/configure/enterprise/token - /api/v3/configure/token/admin/regenerate - /api/v3/configure/token - /api/v3/configure/token/named_admin + - /api/v3/configure/enterprise/token title: Authentication 
description: > Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: + | Authentication scheme | Works with | |:----------------------|:-----------| @@ -77,18 +78,13 @@ articles: | Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) | + See the **Security Schemes** section below for details on each authentication method. tag: Authentication isConceptual: true menuGroup: Concepts operations: - - operationId: PostCreateResourceToken - method: POST - path: /api/v3/configure/enterprise/token - summary: Create a resource token - tags: - - Authentication - operationId: PostRegenerateAdminToken method: POST path: /api/v3/configure/token/admin/regenerate @@ -107,10 +103,17 @@ articles: summary: Create named admin token tags: - Authentication + - operationId: PostCreateResourceToken + method: POST + path: /api/v3/configure/enterprise/token + summary: Create a resource token + tags: + - Authentication tagDescription: > Depending on your workflow, use one of the following schemes to authenticate to the InfluxDB 3 API: + | Authentication scheme | Works with | |:----------------------|:-----------| @@ -126,6 +129,7 @@ articles: | Querystring authentication | v1 compatibility endpoints (`/write`, `/query`) | + See the **Security Schemes** section below for details on each authentication method. showSecuritySchemes: true @@ -143,14 +147,17 @@ articles: description: > Manage the in-memory cache. + #### Distinct Value Cache + The Distinct Value Cache (DVC) lets you cache distinct values of one or more columns in a table, improving the performance of queries that return distinct tag and field values. + The DVC is an in-memory cache that stores distinct values for specific columns @@ -165,8 +172,10 @@ articles: have multiple DVCs. 
+ #### Last value cache + The Last Value Cache (LVC) lets you cache the most recent values for specific fields in a table, improving the performance of @@ -177,6 +186,7 @@ articles: of a field. + The LVC is an in-memory cache that stores the last N number of values for @@ -189,8 +199,10 @@ articles: An LVC is associated with a table, which can have multiple LVCs. + #### Related guides + - [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/) @@ -227,14 +239,17 @@ articles: tagDescription: > Manage the in-memory cache. + #### Distinct Value Cache + The Distinct Value Cache (DVC) lets you cache distinct values of one or more columns in a table, improving the performance of queries that return distinct tag and field values. + The DVC is an in-memory cache that stores distinct values for specific columns @@ -249,8 +264,10 @@ articles: have multiple DVCs. + #### Last value cache + The Last Value Cache (LVC) lets you cache the most recent values for specific fields in a table, improving the performance of @@ -261,6 +278,7 @@ articles: of a field. + The LVC is an in-memory cache that stores the last N number of values for @@ -273,8 +291,10 @@ articles: An LVC is associated with a table, which can have multiple LVCs. + #### Related guides + - [Manage the Distinct Value Cache](/influxdb3/enterprise/admin/distinct-value-cache/) @@ -341,8 +361,10 @@ articles: Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use. + ### Common parameters + The following table shows common parameters used by many InfluxDB API endpoints. @@ -351,6 +373,7 @@ articles: request body that perform functions specific to those endpoints. + | Query parameter | Value type | Description | @@ -359,6 +382,7 @@ articles: | `db` | string | The database name | + InfluxDB HTTP API endpoints use standard HTTP request and response headers. 
@@ -371,6 +395,7 @@ articles: the write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body. + | Header | Value type | Description | @@ -396,8 +421,10 @@ articles: Most InfluxDB API endpoints require parameters in the request--for example, specifying the database to use. + ### Common parameters + The following table shows common parameters used by many InfluxDB API endpoints. @@ -406,6 +433,7 @@ articles: request body that perform functions specific to those endpoints. + | Query parameter | Value type | Description | @@ -414,6 +442,7 @@ articles: | `db` | string | The database name | + InfluxDB HTTP API endpoints use standard HTTP request and response headers. @@ -426,6 +455,7 @@ articles: the write endpoints accept the `Content-Encoding` header to indicate that line protocol is compressed in the request body. + | Header | Value type | Description | @@ -455,6 +485,7 @@ articles: description: > Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. @@ -463,8 +494,10 @@ articles: api-compat-badge--v2">v2 badges are compatible with the respective InfluxDB version. + ### Migration guides + - [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x @@ -487,6 +520,7 @@ articles: tagDescription: > Migrate your existing InfluxDB v1 or v2 workloads to InfluxDB 3. + InfluxDB 3 provides compatibility endpoints that work with InfluxDB 1.x and 2.x client libraries and tools. @@ -495,8 +529,10 @@ articles: api-compat-badge--v2">v2 badges are compatible with the respective InfluxDB version. 
+ ### Migration guides + - [Migrate from InfluxDB v1](/influxdb3/enterprise/guides/migrate/influxdb-1x/) - For users migrating from InfluxDB 1.x @@ -535,13 +571,15 @@ articles: Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins. - InfluxDB 3 Enterprise provides the InfluxDB 3 processing engine, an - embedded Python VM that can dynamically load and trigger Python plugins - in response to events in your database. + + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python + VM that can dynamically load and trigger Python plugins in response to + events in your database. Use Processing engine plugins and triggers to run code and perform tasks for different database events. + To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide. tag: Processing engine @@ -624,13 +662,15 @@ articles: Manage Processing engine triggers, test plugins, and send requests to trigger On Request plugins. - InfluxDB 3 Enterprise provides the InfluxDB 3 processing engine, an - embedded Python VM that can dynamically load and trigger Python plugins - in response to events in your database. + + InfluxDB 3 provides the InfluxDB 3 processing engine, an embedded Python + VM that can dynamically load and trigger Python plugins in response to + events in your database. Use Processing engine plugins and triggers to run code and perform tasks for different database events. + To get started with the processing engine, see the [Processing engine and Python plugins](/influxdb3/enterprise/processing-engine/) guide. 
source: >- @@ -683,7 +723,7 @@ articles: compatVersion: v1 externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ - operationId: PostExecuteV1Query method: POST path: /query @@ -693,10 +733,10 @@ articles: compatVersion: v1 externalDocs: description: Use the InfluxDB v1 HTTP query API and InfluxQL to query data - url: /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + url: /influxdb/version/query-data/execute-queries/influxdb-v1-api/ tagDescription: Query data using SQL or InfluxQL related: - - /influxdb3/enterprise/query-data/execute-queries/influxdb-v1-api/ + - /influxdb/version/query-data/execute-queries/influxdb-v1-api/ source: >- static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-query-data.yaml staticFilePath: >- @@ -709,24 +749,31 @@ articles: description: > 1. [Create an admin token](#section/Authentication) to authorize API requests. + ```bash curl -X POST "http://localhost:8181/api/v3/configure/token/admin" ``` 2. [Check the status](#section/Server-information) of the InfluxDB server. + ```bash curl "http://localhost:8181/health" \ --header "Authorization: Bearer ADMIN_TOKEN" ``` + 3. [Write data](#operation/PostWriteLP) to InfluxDB. + ```bash curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" --header "Authorization: Bearer ADMIN_TOKEN" \ --data-raw "home,room=Kitchen temp=72.0 home,room=Living\ room temp=71.5" ``` + If all data is written, the response is `204 No Content`. + 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB. 
+ ```bash curl -G "http://localhost:8181/api/v3/query_sql" \ --header "Authorization: Bearer ADMIN_TOKEN" \ @@ -736,11 +783,12 @@ articles: ``` Output: + ```jsonl {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Enterprise, see the [Get + For more information about using InfluxDB 3, see the [Get started](/influxdb3/enterprise/get-started/) guide. tag: Quick start isConceptual: true @@ -749,24 +797,31 @@ articles: tagDescription: > 1. [Create an admin token](#section/Authentication) to authorize API requests. + ```bash curl -X POST "http://localhost:8181/api/v3/configure/token/admin" ``` 2. [Check the status](#section/Server-information) of the InfluxDB server. + ```bash curl "http://localhost:8181/health" \ --header "Authorization: Bearer ADMIN_TOKEN" ``` + 3. [Write data](#operation/PostWriteLP) to InfluxDB. + ```bash curl "http://localhost:8181/api/v3/write_lp?db=sensors&precision=auto" --header "Authorization: Bearer ADMIN_TOKEN" \ --data-raw "home,room=Kitchen temp=72.0 home,room=Living\ room temp=71.5" ``` + If all data is written, the response is `204 No Content`. + 4. [Query data](#operation/GetExecuteQuerySQL) from InfluxDB. + ```bash curl -G "http://localhost:8181/api/v3/query_sql" \ --header "Authorization: Bearer ADMIN_TOKEN" \ @@ -776,11 +831,12 @@ articles: ``` Output: + ```jsonl {"room":"Living room","temp":71.5,"time":"2025-02-25T20:19:34.984098"} ``` - For more information about using InfluxDB 3 Enterprise, see the [Get + For more information about using InfluxDB 3, see the [Get started](/influxdb3/enterprise/get-started/) guide. source: >- static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-quick-start.yaml @@ -881,14 +937,17 @@ articles: description: > Write data to InfluxDB 3 using line protocol format. + #### Timestamp precision across write APIs + InfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions. 
The following table compares timestamp precision support across v1, v2, and v3 write APIs: + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) | @@ -910,6 +969,7 @@ articles: | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + All timestamps are stored internally as nanoseconds. tag: Write data isConceptual: false @@ -924,7 +984,7 @@ articles: compatVersion: v1 externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ - operationId: PostV2Write method: POST path: /api/v2/write @@ -934,7 +994,7 @@ articles: compatVersion: v2 externalDocs: description: Use compatibility APIs to write data - url: /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + url: /influxdb/version/write-data/http-api/compatibility-apis/ - operationId: PostWriteLP method: POST path: /api/v3/write_lp @@ -944,14 +1004,17 @@ articles: tagDescription: > Write data to InfluxDB 3 using line protocol format. + #### Timestamp precision across write APIs + InfluxDB 3 provides multiple write endpoints for compatibility with different InfluxDB versions. The following table compares timestamp precision support across v1, v2, and v3 write APIs: + | Precision | v1 (`/write`) | v2 (`/api/v2/write`) | v3 (`/api/v3/write_lp`) | @@ -973,9 +1036,10 @@ articles: | **Default** | Nanosecond | Nanosecond | **Auto** (guessed) | + All timestamps are stored internally as nanoseconds. 
related: - - /influxdb3/enterprise/write-data/http-api/compatibility-apis/ + - /influxdb/version/write-data/http-api/compatibility-apis/ source: >- static/openapi/influxdb3-enterprise/tags/tags/influxdb3-enterprise-write-data.yaml staticFilePath: >- From 60645e7e455a5967ffeb2b87b3b5545c4cf7dc95 Mon Sep 17 00:00:00 2001 From: Jason Stirnaman Date: Fri, 2 Jan 2026 10:57:46 -0600 Subject: [PATCH 51/51] feat(api): add shared overlay structure for management APIs Extend the overlay architecture to Cloud Dedicated and Clustered Management APIs, sharing ~460 lines of components (schemas, responses, security schemes) between products. Changes: - Add shared/management/base.yml with common components - Add Clustered management overlay (1323 lines) with simple paths - Add Cloud Dedicated management overlay (1617 lines) with account/cluster paths - Fix apply-overlay.js to correctly handle array replacements - Update getswagger.sh with bundleManagementWithOverlay function - Regenerate management openapi.yml files from overlays - Remove redundant content/ directories (info.yml, servers.yml, tag-groups.yml) for Core, Enterprise, Clustered, and Cloud Dedicated management APIs - Update Core and Enterprise overlays with full info and x-tagGroups The management APIs have fundamentally different path structures: - Clustered: /databases, /tokens - Cloud Dedicated: /accounts/{accountId}/clusters/{clusterId}/databases Overlay files now contain all product-specific customizations, eliminating the need for separate Redocly decorator content files. 
--- api-docs/getswagger.sh | 51 +- .../management/content/info.yml | 15 - .../management/content/servers.yml | 8 - .../management/content/tag-groups.yml | 6 - .../cloud-dedicated/management/openapi.yml | 50 +- .../cloud-dedicated/management/overlay.yml | 1617 +++++++++++++++++ .../clustered/management/content/info.yml | 15 - .../clustered/management/content/servers.yml | 8 - .../management/content/tag-groups.yml | 6 - .../clustered/management/openapi.yml | 46 +- .../clustered/management/overlay.yml | 1323 ++++++++++++++ api-docs/influxdb3/core/v3/content/info.yml | 34 - .../influxdb3/core/v3/content/servers.yml | 8 - .../influxdb3/core/v3/content/tag-groups.yml | 15 - api-docs/influxdb3/core/v3/overlay.yml | 76 +- api-docs/influxdb3/core/v3/ref.yml | 17 +- .../influxdb3/enterprise/v3/content/info.yml | 34 - .../enterprise/v3/content/servers.yml | 8 - .../enterprise/v3/content/tag-groups.yml | 15 - api-docs/influxdb3/enterprise/v3/overlay.yml | 77 +- api-docs/influxdb3/enterprise/v3/ref.yml | 17 +- api-docs/influxdb3/shared/management/base.yml | 460 +++++ api-docs/scripts/apply-overlay.js | 8 +- 23 files changed, 3604 insertions(+), 310 deletions(-) delete mode 100644 api-docs/influxdb3/cloud-dedicated/management/content/info.yml delete mode 100644 api-docs/influxdb3/cloud-dedicated/management/content/servers.yml delete mode 100644 api-docs/influxdb3/cloud-dedicated/management/content/tag-groups.yml create mode 100644 api-docs/influxdb3/cloud-dedicated/management/overlay.yml delete mode 100644 api-docs/influxdb3/clustered/management/content/info.yml delete mode 100644 api-docs/influxdb3/clustered/management/content/servers.yml delete mode 100644 api-docs/influxdb3/clustered/management/content/tag-groups.yml create mode 100644 api-docs/influxdb3/clustered/management/overlay.yml delete mode 100644 api-docs/influxdb3/core/v3/content/info.yml delete mode 100644 api-docs/influxdb3/core/v3/content/servers.yml delete mode 100644 
api-docs/influxdb3/core/v3/content/tag-groups.yml delete mode 100644 api-docs/influxdb3/enterprise/v3/content/info.yml delete mode 100644 api-docs/influxdb3/enterprise/v3/content/servers.yml delete mode 100644 api-docs/influxdb3/enterprise/v3/content/tag-groups.yml create mode 100644 api-docs/influxdb3/shared/management/base.yml diff --git a/api-docs/getswagger.sh b/api-docs/getswagger.sh index 363c8ffddf..300aaa4b6a 100755 --- a/api-docs/getswagger.sh +++ b/api-docs/getswagger.sh @@ -155,19 +155,7 @@ function postProcess() { } function updateCloudDedicatedManagement { - outFile="influxdb3/cloud-dedicated/management/openapi.yml" - if [[ -z "$baseUrl" ]]; - then - echo "Using existing $outFile" - else - # Clone influxdata/granite and fetch the latest openapi.yaml file. - echo "Fetching the latest openapi.yaml file from influxdata/granite" - tmp_dir=$(mktemp -d) - git clone --depth 1 --branch main https://github.com/influxdata/granite.git "$tmp_dir" - cp "$tmp_dir/openapi.yaml" "$outFile" - rm -rf "$tmp_dir" - fi - postProcess $outFile 'influxdb3/cloud-dedicated/.config.yml' management@0 + bundleManagementWithOverlay "cloud-dedicated" } function updateCloudDedicatedV2 { @@ -193,19 +181,7 @@ function updateCloudServerlessV2 { } function updateClusteredManagement { - outFile="influxdb3/clustered/management/openapi.yml" - if [[ -z "$baseUrl" ]]; - then - echo "Using existing $outFile" - else - # Clone influxdata/granite and fetch the latest openapi.yaml file. 
- echo "Fetching the latest openapi.yaml file from influxdata/granite" - tmp_dir=$(mktemp -d) - git clone --depth 1 --branch main https://github.com/influxdata/granite.git "$tmp_dir" - cp "$tmp_dir/openapi.yaml" "$outFile" - rm -rf "$tmp_dir" - fi - postProcess $outFile 'influxdb3/clustered/.config.yml' management@0 + bundleManagementWithOverlay "clustered" } function updateClusteredV2 { @@ -243,6 +219,29 @@ function bundleWithOverlay { postProcess "$outFile" "$configFile" "${apiVersion}@3" } +# Bundle shared management base spec with product-specific overlay +# Usage: bundleManagementWithOverlay +# Example: bundleManagementWithOverlay "clustered" +function bundleManagementWithOverlay { + local product=$1 + + local baseFile="influxdb3/shared/management/base.yml" + local overlayFile="influxdb3/${product}/management/overlay.yml" + local outFile="influxdb3/${product}/management/openapi.yml" + local configFile="influxdb3/${product}/.config.yml" + + echo "Bundling ${product} management with overlay..." 
+ + # Apply overlay to base spec (run from project root for node_modules access) + local scriptDir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + local projectRoot=$(dirname "$scriptDir") + + (cd "$projectRoot" && node api-docs/scripts/apply-overlay.js "api-docs/$baseFile" "api-docs/$overlayFile" -o "api-docs/$outFile") + + # Apply Redocly decorators + postProcess "$outFile" "$configFile" "management@0" +} + function updateCoreV3 { bundleWithOverlay "core" "v3" } diff --git a/api-docs/influxdb3/cloud-dedicated/management/content/info.yml b/api-docs/influxdb3/cloud-dedicated/management/content/info.yml deleted file mode 100644 index b18e6956fd..0000000000 --- a/api-docs/influxdb3/cloud-dedicated/management/content/info.yml +++ /dev/null @@ -1,15 +0,0 @@ -title: InfluxDB 3 Cloud Dedicated Management API -x-influxdata-short-title: Management API -description: | - The Management API for InfluxDB 3 Cloud Dedicated provides a programmatic interface for managing a Cloud Dedicated cluster. - The Management API lets you integrate functions such as creating and managing databases, permissions, and tokens into your workflow or application. - - This documentation is generated from the - InfluxDB OpenAPI specification. 
-license: - name: MIT - url: 'https://opensource.org/licenses/MIT' -contact: - name: InfluxData - url: https://www.influxdata.com - email: support@influxdata.com \ No newline at end of file diff --git a/api-docs/influxdb3/cloud-dedicated/management/content/servers.yml b/api-docs/influxdb3/cloud-dedicated/management/content/servers.yml deleted file mode 100644 index 6e97ac2807..0000000000 --- a/api-docs/influxdb3/cloud-dedicated/management/content/servers.yml +++ /dev/null @@ -1,8 +0,0 @@ -- url: 'https://{baseurl}/api/v0' - description: InfluxDB 3 Cloud Dedicated Management API URL - variables: - baseurl: - enum: - - 'console.influxdata.com' - default: 'console.influxdata.com' - description: InfluxDB 3 Cloud Dedicated Console URL diff --git a/api-docs/influxdb3/cloud-dedicated/management/content/tag-groups.yml b/api-docs/influxdb3/cloud-dedicated/management/content/tag-groups.yml deleted file mode 100644 index 57e8c8484c..0000000000 --- a/api-docs/influxdb3/cloud-dedicated/management/content/tag-groups.yml +++ /dev/null @@ -1,6 +0,0 @@ -- name: Using the Management API - tags: - - Authentication - - Quickstart -- name: All endpoints - tags: [] diff --git a/api-docs/influxdb3/cloud-dedicated/management/openapi.yml b/api-docs/influxdb3/cloud-dedicated/management/openapi.yml index a74165c29d..5099d8f854 100644 --- a/api-docs/influxdb3/cloud-dedicated/management/openapi.yml +++ b/api-docs/influxdb3/cloud-dedicated/management/openapi.yml @@ -974,11 +974,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - accountId - clusterId @@ -1083,7 +1083,7 @@ paths: permissions: $ref: 
'#/components/schemas/DatabaseTokenPermissions' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - description examples: @@ -1130,13 +1130,13 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' accessToken: $ref: '#/components/schemas/DatabaseTokenAccessToken' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - accountId - clusterId @@ -1279,11 +1279,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - accountId - clusterId @@ -1440,11 +1440,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - accountId - clusterId @@ -1653,7 +1653,7 @@ components: minLength: 1 ClusterDatabaseRetentionPeriod: description: | - The retention period of the [cluster database](/influxdb3/cloud-dedicated/admin/databases/) in nanoseconds, if applicable + The retention period of the [cluster database](/influxdb/version/admin/databases/) in nanoseconds, if applicable 
If the retention period is not set or is set to 0, the database will have infinite retention type: integer @@ -1683,11 +1683,11 @@ components: minimum: 1 ClusterDatabasePartitionTemplate: description: | - A template for [partitioning](/influxdb3/cloud-dedicated/admin/custom-partitions/) a cluster database. + A template for [partitioning](/influxdb/version/admin/custom-partitions/) a cluster database. - Each partition template part is evaluated in sequence. - The outputs from each part are concatenated with the - `|` delimiter to form the final partition key. + Each template part is evaluated in sequence, concatenating the final + partition key from the output of each part, delimited by the partition + key delimiter `|`. For example, using the partition template below: @@ -1727,7 +1727,7 @@ components: * `time=2023-01-01, a=` -> `2023|#|!|!` * `time=2023-01-01, c=` -> `2023|!|!|` - When using the default [partitioning](/influxdb3/cloud-dedicated/admin/custom-partitions/) template (YYYY-MM-DD) there is no + When using the default [partitioning](/influxdb/version/admin/custom-partitions/) template (YYYY-MM-DD) there is no encoding necessary, as the derived partition key contains a single part, and no reserved characters. 
[`TemplatePart::Bucket`] parts by definition will always be within the part length limit and contain no restricted characters @@ -1829,7 +1829,7 @@ components: tagName: c numberOfBuckets: 10 ClusterDatabaseTableName: - description: The name of the [cluster database](/influxdb3/cloud-dedicated/admin/databases/) table + description: The name of the [cluster database](/influxdb/version/admin/databases/) table type: string examples: - TableOne @@ -1842,15 +1842,15 @@ components: - Limited Access Token - Full Access Token DatabaseTokenResourceAllDatabases: - description: A resource value for a [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) permission that refers to all databases + description: A resource value for a [database token](/influxdb/version/admin/tokens/database/) permission that refers to all databases type: string enum: - '*' DatabaseTokenPermissionAction: - description: The action the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) permission allows + description: The action the [database token](/influxdb/version/admin/tokens/database/) permission allows type: string DatabaseTokenPermissionResource: - description: The resource the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) permission applies to + description: The resource the [database token](/influxdb/version/admin/tokens/database/) permission applies to anyOf: - $ref: '#/components/schemas/ClusterDatabaseName' - $ref: '#/components/schemas/DatabaseTokenResourceAllDatabases' @@ -1874,7 +1874,7 @@ components: - action: write resource: '*' DatabaseTokenPermissions: - description: The list of permissions the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) allows + description: The list of permissions the [database token](/influxdb/version/admin/tokens/database/) allows type: array items: $ref: '#/components/schemas/DatabaseTokenPermission' @@ -1887,7 +1887,7 @@ components: resource: '*' DatabaseTokenCreatedAt: description: | 
- The date and time that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) was created + The date and time that the [database token](/influxdb/version/admin/tokens/database/) was created Uses RFC3339 format $ref: '#/components/schemas/DateTimeRfc3339' diff --git a/api-docs/influxdb3/cloud-dedicated/management/overlay.yml b/api-docs/influxdb3/cloud-dedicated/management/overlay.yml new file mode 100644 index 0000000000..b53eb114fe --- /dev/null +++ b/api-docs/influxdb3/cloud-dedicated/management/overlay.yml @@ -0,0 +1,1617 @@ +overlay: 1.0.0 +info: + title: Cloud Dedicated Management API overlay + version: 1.0.0 +actions: + - target: $.info + update: + title: InfluxDB 3 Cloud Dedicated Management API + description: | + The Management API for InfluxDB 3 Cloud Dedicated provides a programmatic interface for managing a Cloud Dedicated cluster. + The Management API lets you integrate functions such as creating and managing databases, permissions, and tokens into your workflow or application. + + This documentation is generated from the + InfluxDB OpenAPI specification. + version: '' + license: + name: MIT + url: https://opensource.org/licenses/MIT + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com + - target: $.servers + update: + - url: https://{baseurl}/api/v0 + description: InfluxDB 3 Cloud Dedicated Management API URL + variables: + baseurl: + enum: + - console.influxdata.com + default: console.influxdata.com + description: InfluxDB 3 Cloud Dedicated Console URL + - target: $.tags + update: + - name: Authentication + x-traitTag: true + description: | + With InfluxDB 3 Cloud Dedicated, the InfluxDB Management API endpoints require the following credentials: + + - `ACCOUNT_ID`: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the cluster belongs to. 
To view account ID and cluster ID, [list cluster details](/influxdb3/cloud-dedicated/admin/clusters/list/#detailed-output-in-json). + - `CLUSTER_ID`: The ID of the [cluster](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that you want to manage. To view account ID and cluster ID, [list cluster details](/influxdb3/cloud-dedicated/admin/clusters/list/#detailed-output-in-json). + - `Authorization MANAGEMENT_TOKEN`: the `Authorization` HTTP header with a [management token](/influxdb3/cloud-dedicated/admin/tokens/management/). + + See how to [create a management token](/influxdb3/cloud-dedicated/admin/tokens/management/). + + By default, management tokens in InfluxDB 3 are short-lived tokens issued by an OAuth2 identity provider that grant a specific user administrative access to your InfluxDB cluster. However, for automation purposes, you can manually create management tokens that authenticate directly with your InfluxDB cluster and do not require human interaction with your identity provider. + + - name: Database tokens + description: Manage database read/write tokens for a cluster + - name: Databases + description: Manage databases for a cluster + - name: Quickstart + x-traitTag: true + description: | + The following example script shows how to use `curl` to make database and token management requests: + + ```shell + #!/bin/bash + + # Usage: + # Note the leading space in the command below to keep secrets out of the shell history + # + # ``` + # MANAGEMENT_TOKEN= ACCOUNT_ID= CLUSTER_ID= ./scripts/test_http_api_v0_endpoints.sh + # ``` + + # Env var validation + if [ -z "${MANAGEMENT_TOKEN}" ]; then + echo " + [Error]: ❌ + \$MANAGEMENT_TOKEN env var is required. + " + exit 1 + fi + + if [ -z "${ACCOUNT_ID}" ]; then + echo " + [Error]: ❌ + \$ACCOUNT_ID env var is required. + " + exit 1 + fi + + if [ -z "${CLUSTER_ID}" ]; then + echo " + [Error]: ❌ + \$CLUSTER_ID env var is required. 
+ " + exit 1 + fi + + HOST="https://console.influxdata.com" + + # Database request functions + list_databases () { + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + create_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "name": "'$databaseName'", + "maxTables": 75, + "maxColumnsPerTable": 90, + "retentionPeriod": 600000000000, + "partitionTemplate": [ + { + "type": "tag", + "value": "abc" + }, + { + "type": "bucket", + "value": { + "tagName": "def", + "numberOfBuckets": 5 + } + } + ] + }' \ + ) + echo "$response" + } + + update_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases/$databaseName" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "maxTables": 150, + "maxColumnsPerTable": 180, + "retentionPeriod": 1200000000000 + }' \ + ) + echo "$response" + } + + delete_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases/$databaseName" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + # Token request functions + list_tokens () { + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer 
$MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + create_token () { + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "my test token", + "permissions": [ + { + "action": "write", + "resource": "database_one" + }, + { + "action": "read", + "resource": "database_two" + } + ] + }' \ + ) + echo "$response" + } + + get_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + update_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "my updated test token", + "permissions": [ + { + "action": "read", + "resource": "database_one" + } + ] + }' \ + ) + echo "$response" + } + + delete_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + + # Test database endpoints + databaseName="test_database_$RANDOM" + + printf "\n🏗️ Creating database... 🏗️\n\n" + response="$(create_database $databaseName)" + echo $response | jq + printf "\n🏗️ Creating database successful 🏗️\n\n" + + printf "\n⬆️ Updating database...
⬆️\n\n" + response="$(update_database $databaseName)" + echo $response | jq + printf "\n⬆️ Updating database successful ⬆️\n\n" + + printf "\n⬇️ Listing databases... ⬇️\n\n" + response="$(list_databases)" + echo $response | jq + printf "\n⬇️ Listing databases successful ⬇️\n\n" + + printf "\n🗑️ Deleting database... 🗑️\n\n" + response="$(delete_database $databaseName)" + echo $response | jq + printf "\n🗑️ Deleting database successful 🗑️\n\n" + + + # Test token endpoints + printf "\n🏗️ Creating token... 🏗️\n\n" + response="$(create_token)" + echo $response | jq + tokenId=$(echo $response | jq '.id') + printf "\n🏗️ Creating token successful 🏗️\n\n" + + printf "\n⬇️ Getting token... ⬇️\n\n" + response="$(get_token $tokenId)" + echo $response | jq + printf "\n⬇️ Getting token successful ⬇️\n\n" + + printf "\n⬆️ Updating token... ⬆️\n\n" + response="$(update_token $tokenId)" + echo $response | jq + printf "\n⬆️ Updating token successful ⬆️\n\n" + + printf "\n📋 Listing tokens... 📋\n\n" + response="$(list_tokens)" + echo $response | jq + printf "\n📋 Listing tokens successful 📋\n\n" + + printf "\n🗑️ Deleting token... 
🗑️\n\n" + response="$(delete_token $tokenId)" + echo $response | jq + printf "\n🗑️ Deleting token successful 🗑️\n\n" + ``` + - name: Tables + description: Manage tables in a database + - target: $.paths['/accounts/{accountId}/clusters/{clusterId}/databases'] + update: + get: + operationId: GetClusterDatabases + summary: Get all databases for a cluster + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) to get the [databases](/influxdb3/cloud-dedicated/admin/databases/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster to get the [databases](/influxdb3/cloud-dedicated/admin/databases/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + responses: + '200': + description: The cluster databases were successfully retrieved + content: + application/json: + schema: + type: array + items: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the database belongs to + $ref: '#/components/schemas/UuidV4' + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - accountId + - clusterId + - name + - maxTables + - maxColumnsPerTable + - retentionPeriod + example: + - accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + 
maxTables: 500 + maxColumnsPerTable: 200 + retentionPeriod: 0 + - accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: '' + lang: Shell + source: | + HOST="https://console.influxdata.com" + + list_databases () { + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + tags: + - Databases + post: + operationId: CreateClusterDatabase + summary: Create a database + description: | + Create a database for a cluster. + + The database name must be unique within the cluster. + + **Default maximum number of columns**: 250 + **Default maximum number of tables**: 500 + + The retention period is specified in nanoseconds. For example, to set a retention period of 1 hour, use `3600000000000`. + + InfluxDB Cloud Dedicated lets you define a [custom partitioning](/influxdb3/cloud-dedicated/admin/custom-partitions/) strategy for each database and table. + A _partition_ is a logical grouping of data stored in [Apache Parquet](https://parquet.apache.org/). + By default, data is partitioned by day, + but, depending on your schema and workload, customizing the partitioning + strategy can improve query performance. 
+ + To use custom partitioning, you define a [partition template](/influxdb3/cloud-dedicated/admin/custom-partitions/partition-templates/). + If a table doesn't have a custom partition template, it inherits the database's template. + The partition template is set at the time of database creation and cannot be changed later. + For more information, see [Custom partitions](/influxdb3/cloud-dedicated/admin/custom-partitions/). + tags: + - Databases + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) to create the database for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster to create the database for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + name: DatabaseOne + allFields: + summary: All Fields + value: + name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + responses: + '200': + description: The cluster database was successfully created + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the 
[account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the database belongs to + $ref: '#/components/schemas/UuidV4' + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - accountId + - clusterId + - name + - maxTables + - maxColumnsPerTable + - retentionPeriod + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 200 + retentionPeriod: 0 + allFields: + summary: All Fields + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: a + - type: tag + value: c + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + create_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location 
"$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "name": "'$databaseName'", + "maxTables": 75, + "maxColumnsPerTable": 90, + "retentionPeriod": 600000000000, + "partitionTemplate": [ + { + "type": "tag", + "value": "abc" + }, + { + "type": "bucket", + "value": { + "tagName": "def", + "numberOfBuckets": 5 + } + } + ] + }' \ + ) + echo "$response" + } + - target: $.paths['/accounts/{accountId}/clusters/{clusterId}/databases/{databaseName}'] + update: + patch: + operationId: UpdateClusterDatabase + summary: Update a database + tags: + - Databases + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster that the database belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: databaseName + in: path + description: The name of the database to update + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + minProperties: 1 + examples: + allFields: + summary: Update All Fields + value: + maxTables: 300 + maxColumnsPerTable: 150 + retentionPeriod: 600000000000 + maxTablesOnly: + summary: Update Max Tables Only + value: + maxTables: 300 + maxColumnsPerTableOnly: + summary: Update Max Columns Per Table Only + value: + maxColumnsPerTable: 150 + 
retentionPeriodOnly: + summary: Update Retention Period Only + value: + retentionPeriod: 600000000000 + responses: + '200': + description: The cluster database was successfully updated. + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the database belongs to + $ref: '#/components/schemas/UuidV4' + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + required: + - accountId + - clusterId + - maxTables + - maxColumnsPerTable + - retentionPeriod + - name + examples: + allFields: + summary: Update All Fields + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + maxTables: 300 + maxColumnsPerTable: 150 + retentionPeriod: 600000000000 + maxTablesOnly: + summary: Update Max Tables Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + maxTables: 300 + maxColumnsPerTable: 200 + retentionPeriod: 0 + maxColumnsPerTableOnly: + summary: Update Max Columns Per Table Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 150 + retentionPeriod: 0 + retentionPeriodOnly: + summary: Update Retention Period Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 200 + 
retentionPeriod: 600000000000 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + update_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases/$databaseName" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "maxTables": 150, + "maxColumnsPerTable": 180, + "retentionPeriod": 1200000000000 + }' \ + ) + echo "$response" + } + delete: + operationId: DeleteClusterDatabase + summary: Delete a database + tags: + - Databases + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster that the database belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: databaseName + in: path + description: The name of the database to delete + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + responses: + '204': + description: The cluster database was successfully deleted + $ref: '#/components/responses/NoContent' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: 
Shell + source: | + HOST="https://console.influxdata.com" + + delete_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/databases/$databaseName" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + - target: $.paths['/accounts/{accountId}/clusters/{clusterId}/databases/{databaseName}/tables'] + update: + post: + operationId: CreateClusterDatabaseTable + summary: Create a database table + description: | + Create a table. The database must already exist. With InfluxDB Cloud Dedicated, tables and measurements are synonymous. + + Typically, tables are created automatically on write using the measurement name + specified in line protocol written to InfluxDB. + However, to apply a [custom partition template](/influxdb3/cloud-dedicated/admin/custom-partitions/) + to a table, you must manually [create the table with custom partitioning](/influxdb3/cloud-dedicated/admin/tables/#create-a-table-with-custom-partitioning) before you write any data to it. + + Partitioning defaults to `%Y-%m-%d` (daily). + When a partition template is applied to a database, it becomes the default template + for all tables in that database, but can be overridden when creating a + table. 
+ tags: + - Tables + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) to create the database table for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster to create the database table for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: databaseName + in: path + description: The name of the database to create the database table for + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseTableName' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + name: TableOne + allFields: + summary: All Fields + value: + name: TableTwo + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + responses: + '200': + description: The cluster database table was successfully created + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the database table belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the database table belongs to + $ref: '#/components/schemas/UuidV4' + databaseName: + description: The name of the database that the database table belongs to + $ref: '#/components/schemas/ClusterDatabaseName' + name: + description: The name of the database table + $ref: '#/components/schemas/ClusterDatabaseTableName' + 
partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - accountId + - clusterId + - databaseName + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + databaseName: DatabaseOne + name: TableOne + allFields: + summary: All Fields + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + databaseName: DatabaseOne + name: TableTwo + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: a + - type: tag + value: c + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + - target: $.paths['/accounts/{accountId}/clusters/{clusterId}/tokens'] + update: + get: + operationId: GetDatabaseTokens + summary: Get all database tokens for a cluster + tags: + - Database tokens + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) to get the [database tokens](/influxdb3/cloud-dedicated/admin/tokens/database/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster to get the [database tokens](/influxdb3/cloud-dedicated/admin/tokens/database/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + responses: + '200': + description: The database tokens were successfully retrieved + content: + application/json: + schema: + type: array + items: + type: object + properties: + accountId: + description: The ID of 
the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - accountId + - clusterId + - id + - description + - permissions + - createdAt + example: + - accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + - accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + - accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 77777777-7777-4777-8777-777777777777 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: 
'#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + list_tokens () { + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + post: + operationId: CreateDatabaseToken + summary: Create a database token + tags: + - Database tokens + description: | + Create a [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) for a cluster. + + The token returned on the `accessToken` property in the response can be used to authenticate query and write requests to the cluster. + + ### Notable behaviors + + - InfluxDB might take some time--from a few seconds to a few minutes--to activate and synchronize new tokens. If a new database token doesn't immediately work (you receive a `401 Unauthorized` error) for querying or writing, wait and then try your request again. + + - Token strings are viewable _only_ on token creation and aren't stored by InfluxDB; you can't recover a lost token. + + #### Store secure tokens in a secret store + + We recommend storing database tokens in a **secure secret store**. + For example, see how to [authenticate Telegraf using tokens in your OS secret store](https://github.com/influxdata/telegraf/tree/master/plugins/secretstores/os). + + If you lose a token, [delete the token from InfluxDB](/influxdb3/cloud-dedicated/admin/tokens/database/delete/) and create a new one. 
+ parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) to create the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster to create the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) for + required: true + schema: + $ref: '#/components/schemas/UuidV4' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + required: + - description + examples: + limitedAccessToken: + summary: Limited Access Token + value: + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + fullAccessToken: + summary: Full Access Token + value: + description: Full Access Token + permissions: + - action: write + resource: '*' + noAccessToken: + summary: No Access Token + value: + description: No Access Token + permissions: [] + responses: + '200': + description: The database token was successfully created + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + id: + description: The ID of the database token + $ref: 
'#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + accessToken: + $ref: '#/components/schemas/DatabaseTokenAccessToken' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - accountId + - clusterId + - id + - description + - permissions + - createdAt + - accessToken + examples: + limitedAccessToken: + summary: Limited Access Token + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + accessToken: apiv1_5555555555555555555555555555555555555555555555555555555555555555 + fullAccessToken: + summary: Full Access Token + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + accessToken: apiv1_6666666666666666666666666666666666666666666666666666666666666666 + noAccessToken: + summary: No Access Token + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 66666666-6666-4666-8666-666666666666 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + accessToken: apiv1_7777777777777777777777777777777777777777777777777777777777777777 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: 
'#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + create_token () { + local description=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "'$description'", + "permissions": [ + { + "action": "read", + "resource": "DatabaseOne" + }, + { + "action": "write", + "resource": "DatabaseTwo" + } + ] + }' \ + ) + echo "$response" + } + - target: $.paths['/accounts/{accountId}/clusters/{clusterId}/tokens/{tokenId}'] + update: + get: + operationId: GetDatabaseToken + summary: Get a database token + description: | + Retrieve metadata details for a [database token](/influxdb3/cloud-dedicated/admin/tokens/database/). + + #### Store secure tokens in a secret store + + We recommend storing database tokens in a **secure secret store**. + + Token strings are viewable _only_ on token creation and aren't stored by InfluxDB; you can't recover a lost token. 
+ tags: + - Database tokens + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) to get + required: true + schema: + $ref: '#/components/schemas/UuidV4' + responses: + '200': + description: The database token was successfully retrieved. + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - accountId + - clusterId + - id + - description + - permissions + - createdAt + examples: + limitedAccessToken: + summary: Limited Access Token + value: + accountId: 
11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + fullAccessToken: + summary: Full Access Token + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + noAccessToken: + summary: No Access Token + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 77777777-7777-4777-8777-777777777777 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + get_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + patch: + operationId: UpdateDatabaseToken + summary: Update a database token + description: | + Update the description and permissions of a [database token](/influxdb3/cloud-dedicated/admin/tokens/database/). 
+ tags: + - Database tokens + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) to update + schema: + $ref: '#/components/schemas/UuidV4' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + minProperties: 1 + examples: + allFields: + summary: Update All Fields + value: + description: Updated Limited Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + descriptionOnly: + summary: Update Description Only + value: + description: Updated Limited Access Token + permissionsOnly: + summary: Update Permissions Only + value: + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + removeAllPermissions: + summary: Remove All Permissions + value: + permissions: [] + responses: + '200': + description: The database token was successfully updated + content: + application/json: + schema: + type: object + properties: + accountId: + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database 
token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + clusterId: + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + $ref: '#/components/schemas/UuidV4' + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - accountId + - clusterId + - id + - description + - permissions + - createdAt + examples: + allFields: + summary: Update All Fields + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Updated Limited Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + createdAt: '2023-12-21T17:32:28.000Z' + descriptionOnly: + summary: Update Description Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Updated Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + permissionsOnly: + summary: Update Permissions Only + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: 
DatabaseThree + createdAt: '2023-12-21T17:32:28.000Z' + removeAllPermissions: + summary: Remove All Permissions + value: + accountId: 11111111-1111-4111-8111-111111111111 + clusterId: 33333333-3333-4333-8333-333333333333 + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: [] + createdAt: '2023-12-21T17:32:28.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + update_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "Updated Limited Access Token", + "permissions": [ + { + "action": "write", + "resource": "DatabaseOne" + }, + { + "action": "read", + "resource": "DatabaseTwo" + }, + { + "action": "write", + "resource": "DatabaseThree" + } + ] + }' \ + ) + echo "$response" + } + delete: + operationId: DeleteDatabaseToken + summary: Delete a database token + tags: + - Database tokens + parameters: + - name: accountId + in: path + description: The ID of the [account](/influxdb3/cloud-dedicated/get-started/setup/#request-an-influxdb-cloud-dedicated-cluster) that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: true + schema: + $ref: '#/components/schemas/UuidV4' + - name: clusterId + in: path + description: The ID of the cluster that the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) belongs to + required: 
true + schema: + $ref: '#/components/schemas/UuidV4' + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/cloud-dedicated/admin/tokens/database/) to delete + required: true + schema: + $ref: '#/components/schemas/UuidV4' + responses: + '204': + description: The database token was successfully deleted + $ref: '#/components/responses/NoContent' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://console.influxdata.com" + + delete_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/accounts/$ACCOUNT_ID/clusters/$CLUSTER_ID/tokens/$tokenId" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } diff --git a/api-docs/influxdb3/clustered/management/content/info.yml b/api-docs/influxdb3/clustered/management/content/info.yml deleted file mode 100644 index 0d324fadb5..0000000000 --- a/api-docs/influxdb3/clustered/management/content/info.yml +++ /dev/null @@ -1,15 +0,0 @@ -title: InfluxDB 3 Clustered Management API -x-influxdata-short-title: Management API -description: | - The Management API for InfluxDB 3 Clustered provides a programmatic interface for managing an InfluxDB 3 cluster. - The Management API lets you integrate functions such as creating and managing databases, permissions, and tokens into your workflow or application. - - This documentation is generated from the - InfluxDB 3 Management API OpenAPI specification. 
-license: - name: MIT - url: 'https://opensource.org/licenses/MIT' -contact: - name: InfluxData - url: https://www.influxdata.com - email: support@influxdata.com \ No newline at end of file diff --git a/api-docs/influxdb3/clustered/management/content/servers.yml b/api-docs/influxdb3/clustered/management/content/servers.yml deleted file mode 100644 index edec580b8a..0000000000 --- a/api-docs/influxdb3/clustered/management/content/servers.yml +++ /dev/null @@ -1,8 +0,0 @@ -- url: 'https://{baseurl}/api/v0' - description: InfluxDB 3 Clustered Management API URL - variables: - baseurl: - enum: - - 'console.influxdata.com' - default: 'console.influxdata.com' - description: InfluxDB 3 Clustered Console URL diff --git a/api-docs/influxdb3/clustered/management/content/tag-groups.yml b/api-docs/influxdb3/clustered/management/content/tag-groups.yml deleted file mode 100644 index 57e8c8484c..0000000000 --- a/api-docs/influxdb3/clustered/management/content/tag-groups.yml +++ /dev/null @@ -1,6 +0,0 @@ -- name: Using the Management API - tags: - - Authentication - - Quickstart -- name: All endpoints - tags: [] diff --git a/api-docs/influxdb3/clustered/management/openapi.yml b/api-docs/influxdb3/clustered/management/openapi.yml index 410d10fc8f..2eee26276d 100644 --- a/api-docs/influxdb3/clustered/management/openapi.yml +++ b/api-docs/influxdb3/clustered/management/openapi.yml @@ -790,11 +790,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - id - description @@ -878,7 +878,7 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' expiresAt: - $ref: 
'#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - description examples: @@ -919,13 +919,13 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' accessToken: $ref: '#/components/schemas/DatabaseTokenAccessToken' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - id - description @@ -1034,11 +1034,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - id - description @@ -1168,11 +1168,11 @@ paths: permissions: $ref: '#/components/schemas/DatabaseTokenPermissions' createdAt: - $ref: '#/components/schemas/DatabaseTokenCreatedAt' + $ref: '#/components/schemas/DateTimeRfc3339' expiresAt: - $ref: '#/components/schemas/DatabaseTokenExpiresAt' + $ref: '#/components/schemas/DateTimeRfc3339' revokedAt: - $ref: '#/components/schemas/DatabaseTokenRevokedAt' + $ref: '#/components/schemas/DateTimeRfc3339' required: - id - description @@ -1359,7 +1359,7 @@ components: minLength: 1 ClusterDatabaseRetentionPeriod: description: | - The retention period of the [cluster database](/influxdb3/clustered/admin/databases/) in nanoseconds, if applicable + The retention period of the [cluster database](/influxdb/version/admin/databases/) in nanoseconds, if applicable If the retention period is not set or is set to 0, the database will have infinite 
retention type: integer @@ -1389,9 +1389,9 @@ components: minimum: 1 ClusterDatabasePartitionTemplate: description: | - A template for [partitioning](/influxdb3/clustered/admin/custom-partitions/) a cluster database. + A template for [partitioning](/influxdb/version/admin/custom-partitions/) a cluster database. - Each template part is evaluated in sequence, concatinating the final + Each template part is evaluated in sequence, concatenating the final partition key from the output of each part, delimited by the partition key delimiter `|`. @@ -1433,7 +1433,7 @@ components: * `time=2023-01-01, a=` -> `2023|#|!|!` * `time=2023-01-01, c=` -> `2023|!|!|` - When using the default [partitioning](/influxdb3/clustered/admin/custom-partitions/) template (YYYY-MM-DD) there is no + When using the default [partitioning](/influxdb/version/admin/custom-partitions/) template (YYYY-MM-DD) there is no encoding necessary, as the derived partition key contains a single part, and no reserved characters. [`TemplatePart::Bucket`] parts by definition will always be within the part length limit and contain no restricted characters @@ -1535,7 +1535,7 @@ components: tagName: c numberOfBuckets: 10 ClusterDatabaseTableName: - description: The name of the [cluster database](/influxdb3/clustered/admin/databases/) table + description: The name of the [cluster database](/influxdb/version/admin/databases/) table type: string examples: - TableOne @@ -1548,15 +1548,15 @@ components: - Limited Access Token - Full Access Token DatabaseTokenResourceAllDatabases: - description: A resource value for a [database token](/influxdb3/clustered/admin/tokens/database/) permission that refers to all databases + description: A resource value for a [database token](/influxdb/version/admin/tokens/database/) permission that refers to all databases type: string enum: - '*' DatabaseTokenPermissionAction: - description: The action the [database token](/influxdb3/clustered/admin/tokens/database/) permission allows + 
description: The action the [database token](/influxdb/version/admin/tokens/database/) permission allows type: string DatabaseTokenPermissionResource: - description: The resource the [database token](/influxdb3/clustered/admin/tokens/database/) permission applies to + description: The resource the [database token](/influxdb/version/admin/tokens/database/) permission applies to anyOf: - $ref: '#/components/schemas/ClusterDatabaseName' - $ref: '#/components/schemas/DatabaseTokenResourceAllDatabases' @@ -1580,7 +1580,7 @@ components: - action: write resource: '*' DatabaseTokenPermissions: - description: The list of permissions the [database token](/influxdb3/clustered/admin/tokens/database/) allows + description: The list of permissions the [database token](/influxdb/version/admin/tokens/database/) allows type: array items: $ref: '#/components/schemas/DatabaseTokenPermission' @@ -1593,7 +1593,7 @@ components: resource: '*' DatabaseTokenCreatedAt: description: | - The date and time that the [database token](/influxdb3/clustered/admin/tokens/database/) was created + The date and time that the [database token](/influxdb/version/admin/tokens/database/) was created Uses RFC3339 format $ref: '#/components/schemas/DateTimeRfc3339' diff --git a/api-docs/influxdb3/clustered/management/overlay.yml b/api-docs/influxdb3/clustered/management/overlay.yml new file mode 100644 index 0000000000..75927fc2e8 --- /dev/null +++ b/api-docs/influxdb3/clustered/management/overlay.yml @@ -0,0 +1,1323 @@ +overlay: 1.0.0 +info: + title: Clustered Management API overlay + version: 1.0.0 +actions: + - target: $.info + update: + title: InfluxDB 3 Clustered Management API + description: | + The Management API for InfluxDB 3 Clustered provides a programmatic interface for managing an InfluxDB 3 cluster. + The Management API lets you integrate functions such as creating and managing databases, permissions, and tokens into your workflow or application. 
+ + This documentation is generated from the + InfluxDB 3 Management API OpenAPI specification. + version: '' + license: + name: MIT + url: https://opensource.org/licenses/MIT + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com + - target: $.servers + update: + - url: https://{baseurl}/api/v0 + description: InfluxDB 3 Clustered Management API URL + variables: + baseurl: + enum: + - console.influxdata.com + default: console.influxdata.com + description: InfluxDB 3 Clustered Console URL + - target: $.tags + update: + - name: Authentication + x-traitTag: true + description: | + With InfluxDB 3 Clustered, InfluxDB Management API endpoints require the following credential: + + - `Authorization MANAGEMENT_TOKEN`: the `Authorization` HTTP header with a [management token](/influxdb3/clustered/admin/tokens/management/). + + See how to [create a management token](/influxdb3/clustered/admin/tokens/management/). + + By default, management tokens in InfluxDB 3 are short-lived tokens issued by an OAuth2 identity provider that grant a specific user administrative access to your InfluxDB cluster. However, for automation purposes, you can manually create management tokens that authenticate directly with your InfluxDB cluster and do not require human interaction with your identity provider. + - name: Database tokens + description: Manage database read/write tokens for a cluster + - name: Databases + description: Manage databases for a cluster + - name: Quickstart + x-traitTag: true + description: | + The following example script shows how to use `curl` to make database and token management requests: + + ```shell + #!/bin/bash + + # Usage: + # Note the leading space in the command below to keep secrets out of the shell history + # + # ``` + # MANAGEMENT_TOKEN= ./scripts/test_http_api_v0_endpoints.sh + # ``` + + # Env var validation + if [ -z "${MANAGEMENT_TOKEN}" ]; then + echo " + [Error]: ❌ + \$MANAGEMENT_TOKEN env var is required. 
+ " + exit 1 + fi + + HOST="https://cluster-host.com" + + # Database request functions + list_databases () { + local response=$( \ + curl \ + --location "$HOST/api/v0/databases" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + create_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "name": "'$databaseName'", + "maxTables": 75, + "maxColumnsPerTable": 90, + "retentionPeriod": 600000000000, + "partitionTemplate": [ + { + "type": "tag", + "value": "abc" + }, + { + "type": "bucket", + "value": { + "tagName": "def", + "numberOfBuckets": 5 + } + } + ] + }' \ + ) + echo "$response" + } + + update_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases/$databaseName" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "maxTables": 150, + "maxColumnsPerTable": 180, + "retentionPeriod": 1200000000000 + }' \ + ) + echo "$response" + } + + delete_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases/$databaseName" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + # Token request functions + list_tokens () { + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + create_token () { + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + 
--header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "my test token", + "permissions": [ + { + "action": "write", + "resource": "database_one" + }, + { + "action": "read", + "resource": "database_two" + } + ] + }' \ + ) + echo "$response" + } + + get_token () { + local token_id=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + update_token () { + local token_id=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "my updated test token", + "permissions": [ + { + "action": "database_one", + "resource": "read" + } + ] + }' \ + ) + echo "$response" + } + + delete_token () { + local token_id=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + + + # Test database endpoints + databaseName="test_database_$RANDOM" + + printf "\n🏗️ Creating database... 🏗️\n\n" + response="$(create_database $databaseName)" + echo $response | jq + printf "\n🏗️ Creating database successful 🏗️\n\n" + + printf "\n⬆️ Updating database... ⬆️\n\n" + response="$(update_database $databaseName)" + echo $response | jq + printf "\n⬆️ Updating database successful ⬆️\n\n" + + printf "\n⬇️ Listing databases... ⬇️\n\n" + response="$(list_databases)" + echo $response | jq + printf "\n⬇️ Listing databases successful ⬇️\n\n" + + printf "\n🗑️ Deleting database... 
🗑️\n\n" + response="$(delete_database $databaseName)" + echo $response | jq + printf "\n🗑️ Deleting database successful 🗑️\n\n" + + + # Test token endpoints + printf "\n🏗️ Creating token... 🏗️\n\n" + response="$(create_token)" + echo $response | jq + tokenId=$(echo $response | jq '.id') + printf "\n🏗️ Creating token successful 🏗️\n\n" + + printf "\n⬇️ Getting token... ⬇️\n\n" + response="$(get_token $tokenId)" + echo $response | jq + printf "\n⬇️ Getting token successful ⬇️\n\n" + + printf "\n⬆️ Updating token... ⬆️\n\n" + response="$(update_token $tokenId)" + echo $response | jq + printf "\n⬆️ Updating token successful ⬆️\n\n" + + printf "\n📋 Listing tokens... 📋\n\n" + response="$(list_tokens)" + echo $response | jq + printf "\n📋 Listing tokens successful 📋\n\n" + + printf "\n🗑️ Deleting token... 🗑️\n\n" + response="$(delete_token $tokenId)" + echo $response | jq + printf "\n🗑️ Deleting token successful 🗑️\n\n" + ``` + - name: Tables + description: Manage tables in a database + - target: $.paths['/databases'] + update: + get: + operationId: GetClusterDatabases + summary: Get all databases for a cluster + responses: + '200': + description: The cluster databases were successfully retrieved + content: + application/json: + schema: + type: array + items: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + - maxTables + - maxColumnsPerTable + - retentionPeriod + example: + - name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 200 + retentionPeriod: 0 + - name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: 
'%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: '' + lang: Shell + source: | + HOST="https://cluster-host.com" + + list_databases () { + local response=$( \ + curl \ + --location "$HOST/api/v0/databases" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + tags: + - Databases + post: + operationId: CreateClusterDatabase + summary: Create a database + tags: + - Databases + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + name: DatabaseOne + allFields: + summary: All Fields + value: + name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + responses: + '200': + description: The cluster database was successfully created + content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: 
'#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + - maxTables + - maxColumnsPerTable + - retentionPeriod + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 200 + retentionPeriod: 0 + allFields: + summary: All Fields + value: + name: DatabaseTwo + maxTables: 100 + maxColumnsPerTable: 50 + retentionPeriod: 300000000000 + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: a + - type: tag + value: c + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + create_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "name": "'$databaseName'", + "maxTables": 75, + "maxColumnsPerTable": 90, + "retentionPeriod": 600000000000, + "partitionTemplate": [ + { + "type": "tag", + "value": "abc" + }, + { + "type": "bucket", + "value": { + "tagName": "def", + "numberOfBuckets": 5 + } + } + ] + }' \ + ) + echo "$response" + } + - target: $.paths['/databases/{databaseName}'] + update: + patch: + operationId: UpdateClusterDatabase + summary: Update a database + tags: + - Databases + 
parameters: + - name: databaseName + in: path + description: The name of the database to update + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + minProperties: 1 + examples: + allFields: + summary: Update All Fields + value: + maxTables: 300 + maxColumnsPerTable: 150 + retentionPeriod: 600000000000 + maxTablesOnly: + summary: Update Max Tables Only + value: + maxTables: 300 + maxColumnsPerTableOnly: + summary: Update Max Columns Per Table Only + value: + maxColumnsPerTable: 150 + retentionPeriodOnly: + summary: Update Retention Period Only + value: + retentionPeriod: 600000000000 + responses: + '200': + description: The cluster database was successfully updated. 
+ content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseName' + maxTables: + $ref: '#/components/schemas/ClusterDatabaseMaxTables' + maxColumnsPerTable: + $ref: '#/components/schemas/ClusterDatabaseMaxColumnsPerTable' + retentionPeriod: + $ref: '#/components/schemas/ClusterDatabaseRetentionPeriod' + required: + - maxTables + - maxColumnsPerTable + - retentionPeriod + - name + examples: + allFields: + summary: Update All Fields + value: + name: DatabaseOne + maxTables: 300 + maxColumnsPerTable: 150 + retentionPeriod: 600000000000 + maxTablesOnly: + summary: Update Max Tables Only + value: + name: DatabaseOne + maxTables: 300 + maxColumnsPerTable: 200 + retentionPeriod: 0 + maxColumnsPerTableOnly: + summary: Update Max Columns Per Table Only + value: + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 150 + retentionPeriod: 0 + retentionPeriodOnly: + summary: Update Retention Period Only + value: + name: DatabaseOne + maxTables: 500 + maxColumnsPerTable: 200 + retentionPeriod: 600000000000 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + update_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases/$databaseName" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "maxTables": 150, + "maxColumnsPerTable": 180, + "retentionPeriod": 1200000000000 + }' \ + ) + echo "$response" + } + delete: + operationId: DeleteClusterDatabase + summary: Delete a database + tags: + - Databases + parameters: + - name: 
databaseName + in: path + description: The name of the database to delete + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + responses: + '204': + description: The cluster database was successfully deleted + $ref: '#/components/responses/NoContent' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + delete_database () { + local databaseName=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/databases/$databaseName" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + - target: $.paths['/databases/{databaseName}/tables'] + update: + post: + operationId: CreateClusterDatabaseTable + summary: Create a database table + tags: + - Tables + parameters: + - name: databaseName + in: path + description: The name of the database to create the database table for + required: true + schema: + $ref: '#/components/schemas/ClusterDatabaseName' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + name: + $ref: '#/components/schemas/ClusterDatabaseTableName' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + name: TableOne + allFields: + summary: All Fields + value: + name: TableTwo + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + responses: + '200': + description: The cluster database table was successfully created + content: + 
application/json: + schema: + type: object + properties: + databaseName: + description: The name of the database that the database table belongs to + $ref: '#/components/schemas/ClusterDatabaseName' + name: + description: The name of the database table + $ref: '#/components/schemas/ClusterDatabaseTableName' + partitionTemplate: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplate' + required: + - databaseName + - name + examples: + requiredFieldsOnly: + summary: Required Fields Only + value: + databaseName: DatabaseOne + name: TableOne + allFields: + summary: All Fields + value: + databaseName: DatabaseOne + name: TableTwo + partitionTemplate: + - type: time + value: '%Y' + - type: tag + value: a + - type: tag + value: c + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + - target: $.paths['/tokens'] + update: + get: + operationId: GetDatabaseTokens + summary: Get all database tokens for a cluster + tags: + - Database tokens + responses: + '200': + description: The database tokens were successfully retrieved + content: + application/json: + schema: + type: array + items: + type: object + properties: + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - id + - description + - permissions + - createdAt + example: + - id: 
55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + - id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + - id: 77777777-7777-4777-8777-777777777777 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + list_tokens () { + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + post: + operationId: CreateDatabaseToken + summary: Create a database token + tags: + - Database tokens + description: | + Create a [database token](/influxdb3/clustered/admin/tokens/database/) for a cluster. + + The token returned on the `accessToken` property in the response can be used to authenticate query and write requests to the cluster. + + ### Notable behaviors + + - InfluxDB might take some time--from a few seconds to a few minutes--to activate and synchronize new tokens. If a new database token doesn't immediately work (you receive a `401 Unauthorized` error) for querying or writing, wait and then try your request again. + + - Token strings are viewable _only_ on token creation and aren't stored by InfluxDB; you can't recover a lost token. + + #### Store secure tokens in a secret store + + We recommend storing database tokens in a **secure secret store**. 
+ For example, see how to [authenticate Telegraf using tokens in your OS secret store](https://github.com/influxdata/telegraf/tree/master/plugins/secretstores/os). + + If you lose a token, [delete the token from InfluxDB](/influxdb3/clustered/admin/tokens/database/delete/) and create a new one. + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + required: + - description + examples: + limitedAccessToken: + summary: Limited Access Token + value: + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + fullAccessToken: + summary: Full Access Token + value: + description: Full Access Token + permissions: + - action: write + resource: '*' + noAccessToken: + summary: No Access Token + value: + description: No Access Token + permissions: [] + responses: + '200': + description: The database token was successfully created + content: + application/json: + schema: + type: object + properties: + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + accessToken: + $ref: '#/components/schemas/DatabaseTokenAccessToken' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - id + - description + - permissions + - createdAt + - accessToken + examples: + limitedAccessToken: + summary: Limited Access Token + value: + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: 
read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + accessToken: apiv1_5555555555555555555555555555555555555555555555555555555555555555 + fullAccessToken: + summary: Full Access Token + value: + id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + accessToken: apiv1_6666666666666666666666666666666666666666666666666666666666666666 + noAccessToken: + summary: No Access Token + value: + id: 66666666-6666-4666-8666-666666666666 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + accessToken: apiv1_7777777777777777777777777777777777777777777777777777777777777777 + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + create_token () { + local description=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens" \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "'$description'", + "permissions": [ + { + "action": "read", + "resource": "DatabaseOne" + }, + { + "action": "write", + "resource": "DatabaseTwo" + } + ] + }' \ + ) + echo "$response" + } + - target: $.paths['/tokens/{tokenId}'] + update: + get: + operationId: GetDatabaseToken + summary: Get a database token + tags: + - Database tokens + parameters: + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/clustered/admin/tokens/database/) to get + required: true + schema: + $ref: 
'#/components/schemas/UuidV4' + responses: + '200': + description: The database token was successfully retrieved. + content: + application/json: + schema: + type: object + properties: + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - id + - description + - permissions + - createdAt + examples: + limitedAccessToken: + summary: Limited Access Token + value: + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + fullAccessToken: + summary: Full Access Token + value: + id: 66666666-6666-4666-8666-666666666666 + description: Full Access Token + permissions: + - action: write + resource: '*' + createdAt: '2024-03-02T04:20:19.000Z' + noAccessToken: + summary: No Access Token + value: + id: 77777777-7777-4777-8777-777777777777 + description: No Access Token + permissions: [] + createdAt: '2024-03-02T04:20:19.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + get_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } + patch: + 
operationId: UpdateDatabaseToken + summary: Update a database token + tags: + - Database tokens + parameters: + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/clustered/admin/tokens/database/) to update + required: true + schema: + $ref: '#/components/schemas/UuidV4' + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + minProperties: 1 + examples: + allFields: + summary: Update All Fields + value: + description: Updated Limited Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + descriptionOnly: + summary: Update Description Only + value: + description: Updated Limited Access Token + permissionsOnly: + summary: Update Permissions Only + value: + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + removeAllPermissions: + summary: Remove All Permissions + value: + permissions: [] + responses: + '200': + description: The database token was successfully updated + content: + application/json: + schema: + type: object + properties: + id: + description: The ID of the database token + $ref: '#/components/schemas/UuidV4' + description: + $ref: '#/components/schemas/DatabaseTokenDescription' + permissions: + $ref: '#/components/schemas/DatabaseTokenPermissions' + createdAt: + $ref: '#/components/schemas/DatabaseTokenCreatedAt' + expiresAt: + $ref: '#/components/schemas/DatabaseTokenExpiresAt' + revokedAt: + $ref: '#/components/schemas/DatabaseTokenRevokedAt' + required: + - id + - description + - permissions + - createdAt + examples: + allFields: + summary: Update All Fields + value: + id: 55555555-5555-4555-8555-555555555555 + description: Updated Limited 
Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + createdAt: '2023-12-21T17:32:28.000Z' + descriptionOnly: + summary: Update Description Only + value: + id: 55555555-5555-4555-8555-555555555555 + description: Updated Limited Access Token + permissions: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + createdAt: '2023-12-21T17:32:28.000Z' + permissionsOnly: + summary: Update Permissions Only + value: + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: + - action: write + resource: DatabaseOne + - action: read + resource: DatabaseTwo + - action: write + resource: DatabaseThree + createdAt: '2023-12-21T17:32:28.000Z' + removeAllPermissions: + summary: Remove All Permissions + value: + id: 55555555-5555-4555-8555-555555555555 + description: Limited Access Token + permissions: [] + createdAt: '2023-12-21T17:32:28.000Z' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '409': + $ref: '#/components/responses/Conflict' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + update_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --request PATCH \ + --header "Accept: application/json" \ + --header 'Content-Type: application/json' \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + --data '{ + "description": "Updated Limited Access Token", + "permissions": [ + { + "action": "write", + "resource": "DatabaseOne" + }, + { + "action": "read", + "resource": "DatabaseTwo" + }, + { + "action": "write", + "resource": "DatabaseThree" + } + ] + }' \ + ) + echo "$response" + } 
+ delete: + operationId: DeleteDatabaseToken + summary: Delete a database token + tags: + - Database tokens + parameters: + - name: tokenId + in: path + description: The ID of the [database token](/influxdb3/clustered/admin/tokens/database/) to delete + required: true + schema: + $ref: '#/components/schemas/UuidV4' + responses: + '204': + description: The database token was successfully deleted + $ref: '#/components/responses/NoContent' + '400': + $ref: '#/components/responses/BadRequest' + '401': + $ref: '#/components/responses/Unauthorized' + '403': + $ref: '#/components/responses/Forbidden' + '404': + $ref: '#/components/responses/NotFound' + '500': + $ref: '#/components/responses/InternalServerError' + x-codeSamples: + - label: cURL + lang: Shell + source: | + HOST="https://cluster-host.com" + + delete_token () { + local tokenId=$1 + local response=$( \ + curl \ + --location "$HOST/api/v0/tokens/$tokenId" \ + --request DELETE \ + --header "Accept: application/json" \ + --header "Authorization: Bearer $MANAGEMENT_TOKEN" \ + ) + echo "$response" + } diff --git a/api-docs/influxdb3/core/v3/content/info.yml b/api-docs/influxdb3/core/v3/content/info.yml deleted file mode 100644 index 34e55186eb..0000000000 --- a/api-docs/influxdb3/core/v3/content/info.yml +++ /dev/null @@ -1,34 +0,0 @@ -title: InfluxDB 3 Core API Service -x-influxdata-short-title: InfluxDB 3 API -x-influxdata-version-matrix: - v1: Compatibility layer for InfluxDB 1.x clients (supported) - v2: Compatibility layer for InfluxDB 2.x clients (supported) - v3: Native API for InfluxDB 3.x (current) -x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. -description: | - The InfluxDB HTTP API for InfluxDB 3 Core provides a programmatic interface for - interacting with InfluxDB 3 Core databases and resources. 
- Use this API to: - - - Write data to InfluxDB 3 Core databases - - Query data using SQL or InfluxQL - - Process data using Processing engine plugins - - Manage databases, tables, and Processing engine triggers - - Perform administrative tasks and access system information - - The API includes endpoints under the following paths: - - `/api/v3`: InfluxDB 3 Core native endpoints - - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients - - -license: - name: MIT - url: 'https://opensource.org/licenses/MIT' -contact: - name: InfluxData - url: https://www.influxdata.com - email: support@influxdata.com \ No newline at end of file diff --git a/api-docs/influxdb3/core/v3/content/servers.yml b/api-docs/influxdb3/core/v3/content/servers.yml deleted file mode 100644 index 213371401c..0000000000 --- a/api-docs/influxdb3/core/v3/content/servers.yml +++ /dev/null @@ -1,8 +0,0 @@ -- url: https://{baseurl} - description: InfluxDB 3 Core API URL - variables: - baseurl: - enum: - - 'localhost:8181' - default: 'localhost:8181' - description: InfluxDB 3 Core URL diff --git a/api-docs/influxdb3/core/v3/content/tag-groups.yml b/api-docs/influxdb3/core/v3/content/tag-groups.yml deleted file mode 100644 index 364d5e7940..0000000000 --- a/api-docs/influxdb3/core/v3/content/tag-groups.yml +++ /dev/null @@ -1,15 +0,0 @@ -- name: Using the InfluxDB HTTP API - tags: - - Quick start - - Authentication - - Cache data - - Common parameters - - Response codes - - Compatibility endpoints - - Database - - Processing engine - - Server information - - Table - - Token - - Query data - - Write data diff --git a/api-docs/influxdb3/core/v3/overlay.yml b/api-docs/influxdb3/core/v3/overlay.yml index 2069de4185..94b8ffe7c1 100644 --- a/api-docs/influxdb3/core/v3/overlay.yml +++ b/api-docs/influxdb3/core/v3/overlay.yml @@ -4,32 +4,43 @@ info: version: 1.0.0 actions: - # Override info.title - - target: $.info.title 
- update: InfluxDB 3 Core API Service + # Override info section + - target: $.info + update: + title: InfluxDB 3 Core API Service + x-influxdata-short-title: InfluxDB 3 API + x-influxdata-version-matrix: + v1: Compatibility layer for InfluxDB 1.x clients (supported) + v2: Compatibility layer for InfluxDB 2.x clients (supported) + v3: Native API for InfluxDB 3.x (current) + x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. + description: | + The InfluxDB HTTP API for InfluxDB 3 Core provides a programmatic interface for + interacting with InfluxDB 3 Core databases and resources. + Use this API to: - # Override info.description - - target: $.info.description - update: | - The InfluxDB HTTP API for InfluxDB 3 Core provides a programmatic interface for - interacting with InfluxDB 3 Core databases and resources. - Use this API to: + - Write data to InfluxDB 3 Core databases + - Query data using SQL or InfluxQL + - Process data using Processing engine plugins + - Manage databases, tables, and Processing engine triggers + - Perform administrative tasks and access system information - - Write data to InfluxDB 3 Core databases - - Query data using SQL or InfluxQL - - Process data using Processing engine plugins - - Manage databases, tables, and Processing engine triggers - - Perform administrative tasks and access system information + The API includes endpoints under the following paths: + - `/api/v3`: InfluxDB 3 Core native endpoints + - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients + - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients - The API includes endpoints under the following paths: - - `/api/v3`: InfluxDB 3 Core native endpoints - - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 
workloads and clients - - + + license: + name: MIT + url: 'https://opensource.org/licenses/MIT' + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com # Override servers[0].description - target: $.servers[0].description @@ -38,3 +49,22 @@ actions: # Override servers[0].variables.baseurl.description - target: $.servers[0].variables.baseurl.description update: InfluxDB 3 Core URL + + # Set tag groups for navigation + - target: $.x-tagGroups + update: + - name: Using the InfluxDB HTTP API + tags: + - Quick start + - Authentication + - Cache data + - Common parameters + - Response codes + - Compatibility endpoints + - Database + - Processing engine + - Server information + - Table + - Token + - Query data + - Write data diff --git a/api-docs/influxdb3/core/v3/ref.yml b/api-docs/influxdb3/core/v3/ref.yml index 3010cd9b29..a2a0720b27 100644 --- a/api-docs/influxdb3/core/v3/ref.yml +++ b/api-docs/influxdb3/core/v3/ref.yml @@ -21,7 +21,7 @@ info: This documentation is generated from the [InfluxDB OpenAPI specification](https://raw.githubusercontent.com/influxdata/). --> - version: '' + version: 3.7.0 license: name: MIT url: https://opensource.org/licenses/MIT @@ -29,15 +29,14 @@ info: name: InfluxData url: https://www.influxdata.com email: support@influxdata.com + x-influxdata-short-title: InfluxDB 3 API + x-influxdata-version-matrix: + v1: Compatibility layer for InfluxDB 1.x clients (supported) + v2: Compatibility layer for InfluxDB 2.x clients (supported) + v3: Native API for InfluxDB 3.x (current) + x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. 
servers: - - url: https://{baseurl} - description: InfluxDB 3 Core API URL - variables: - baseurl: - enum: - - localhost:8181 - default: localhost:8181 - description: InfluxDB 3 Core URL + - url: / security: - BearerAuthentication: [] tags: diff --git a/api-docs/influxdb3/enterprise/v3/content/info.yml b/api-docs/influxdb3/enterprise/v3/content/info.yml deleted file mode 100644 index e4ec8ef609..0000000000 --- a/api-docs/influxdb3/enterprise/v3/content/info.yml +++ /dev/null @@ -1,34 +0,0 @@ -title: InfluxDB 3 Enterprise API Service -x-influxdata-short-title: InfluxDB 3 API -x-influxdata-version-matrix: - v1: Compatibility layer for InfluxDB 1.x clients (supported) - v2: Compatibility layer for InfluxDB 2.x clients (supported) - v3: Native API for InfluxDB 3.x (current) -x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. -description: | - The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for - interacting with InfluxDB 3 Enterprise databases and resources. 
- Use this API to: - - - Write data to InfluxDB 3 Enterprise databases - - Query data using SQL or InfluxQL - - Process data using Processing engine plugins - - Manage databases, tables, and Processing engine triggers - - Perform administrative tasks and access system information - - The API includes endpoints under the following paths: - - `/api/v3`: InfluxDB 3 Enterprise native endpoints - - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients - - -license: - name: MIT - url: 'https://opensource.org/licenses/MIT' -contact: - name: InfluxData - url: https://www.influxdata.com - email: support@influxdata.com \ No newline at end of file diff --git a/api-docs/influxdb3/enterprise/v3/content/servers.yml b/api-docs/influxdb3/enterprise/v3/content/servers.yml deleted file mode 100644 index 29f1a6e695..0000000000 --- a/api-docs/influxdb3/enterprise/v3/content/servers.yml +++ /dev/null @@ -1,8 +0,0 @@ -- url: https://{baseurl} - description: InfluxDB 3 Enterprise API URL - variables: - baseurl: - enum: - - 'localhost:8181' - default: 'localhost:8181' - description: InfluxDB 3 Enterprise URL diff --git a/api-docs/influxdb3/enterprise/v3/content/tag-groups.yml b/api-docs/influxdb3/enterprise/v3/content/tag-groups.yml deleted file mode 100644 index 364d5e7940..0000000000 --- a/api-docs/influxdb3/enterprise/v3/content/tag-groups.yml +++ /dev/null @@ -1,15 +0,0 @@ -- name: Using the InfluxDB HTTP API - tags: - - Quick start - - Authentication - - Cache data - - Common parameters - - Response codes - - Compatibility endpoints - - Database - - Processing engine - - Server information - - Table - - Token - - Query data - - Write data diff --git a/api-docs/influxdb3/enterprise/v3/overlay.yml b/api-docs/influxdb3/enterprise/v3/overlay.yml index 86e498a73f..b276b8011d 100644 --- a/api-docs/influxdb3/enterprise/v3/overlay.yml +++ b/api-docs/influxdb3/enterprise/v3/overlay.yml @@ -4,29 
+4,43 @@ info: version: 1.0.0 actions: - # Override info.title - - target: $.info.title - update: InfluxDB 3 Enterprise API Service + # Override info section + - target: $.info + update: + title: InfluxDB 3 Enterprise API Service + x-influxdata-short-title: InfluxDB 3 API + x-influxdata-version-matrix: + v1: Compatibility layer for InfluxDB 1.x clients (supported) + v2: Compatibility layer for InfluxDB 2.x clients (supported) + v3: Native API for InfluxDB 3.x (current) + x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. + description: | + The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for + interacting with InfluxDB 3 Enterprise databases and resources. + Use this API to: + + - Write data to InfluxDB 3 Enterprise databases + - Query data using SQL or InfluxQL + - Process data using Processing engine plugins + - Manage databases, tables, and Processing engine triggers + - Perform administrative tasks and access system information - # Override info.description - - target: $.info.description - update: | - The InfluxDB HTTP API for InfluxDB 3 Enterprise provides a programmatic interface for - interacting with InfluxDB 3 Enterprise databases and resources. 
- Use this API to: - - Write data to InfluxDB 3 Enterprise databases - - Query data using SQL or InfluxQL - - Process data using Processing engine plugins - - Manage databases, tables, and Processing engine triggers - - Perform administrative tasks and access system information - The API includes endpoints under the following paths: - - `/api/v3`: InfluxDB 3 Enterprise native endpoints - - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients - - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients - + The API includes endpoints under the following paths: + - `/api/v3`: InfluxDB 3 Enterprise native endpoints + - `/`: Compatibility endpoints for InfluxDB v1 workloads and clients + - `/api/v2/write`: Compatibility endpoint for InfluxDB v2 workloads and clients + + + license: + name: MIT + url: 'https://opensource.org/licenses/MIT' + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com # Override servers[0].description - target: $.servers[0].description @@ -36,6 +50,25 @@ actions: - target: $.servers[0].variables.baseurl.description update: InfluxDB 3 Enterprise URL + # Set tag groups for navigation + - target: $.x-tagGroups + update: + - name: Using the InfluxDB HTTP API + tags: + - Quick start + - Authentication + - Cache data + - Common parameters + - Response codes + - Compatibility endpoints + - Database + - Processing engine + - Server information + - Table + - Token + - Query data + - Write data + # Enterprise-only: Add PATCH method to /api/v3/configure/table - target: $.paths['/api/v3/configure/table'].patch update: diff --git a/api-docs/influxdb3/enterprise/v3/ref.yml b/api-docs/influxdb3/enterprise/v3/ref.yml index b7bf7c2c02..5d2b67fb2b 100644 --- a/api-docs/influxdb3/enterprise/v3/ref.yml +++ b/api-docs/influxdb3/enterprise/v3/ref.yml @@ -21,7 +21,7 @@ info: This documentation is generated from the [InfluxDB OpenAPI specification](https://raw.githubusercontent.com/influxdata/). 
--> - version: '' + version: 3.7.0 license: name: MIT url: https://opensource.org/licenses/MIT @@ -29,15 +29,14 @@ info: name: InfluxData url: https://www.influxdata.com email: support@influxdata.com + x-influxdata-short-title: InfluxDB 3 API + x-influxdata-version-matrix: + v1: Compatibility layer for InfluxDB 1.x clients (supported) + v2: Compatibility layer for InfluxDB 2.x clients (supported) + v3: Native API for InfluxDB 3.x (current) + x-influxdata-short-description: The InfluxDB 3 HTTP API provides a programmatic interface for interactions with InfluxDB, including writing, querying, and processing data, and managing an InfluxDB 3 instance. servers: - - url: https://{baseurl} - description: InfluxDB 3 Enterprise API URL - variables: - baseurl: - enum: - - localhost:8181 - default: localhost:8181 - description: InfluxDB 3 Enterprise URL + - url: / security: - BearerAuthentication: [] tags: diff --git a/api-docs/influxdb3/shared/management/base.yml b/api-docs/influxdb3/shared/management/base.yml new file mode 100644 index 0000000000..6935675a0f --- /dev/null +++ b/api-docs/influxdb3/shared/management/base.yml @@ -0,0 +1,460 @@ +openapi: 3.1.0 +info: + title: InfluxDB 3 Management API + description: | + The Management API for InfluxDB 3 provides a programmatic interface for managing an InfluxDB 3 cluster. + The Management API lets you integrate functions such as creating and managing databases, permissions, and tokens into your workflow or application. + + This documentation is generated from the + InfluxDB 3 Management API OpenAPI specification. 
+ version: '' + license: + name: MIT + url: https://opensource.org/licenses/MIT + contact: + name: InfluxData + url: https://www.influxdata.com + email: support@influxdata.com +servers: + - url: https://{baseurl}/api/v0 + description: InfluxDB 3 Management API URL + variables: + baseurl: + enum: + - console.influxdata.com + default: console.influxdata.com + description: InfluxDB 3 Console URL +security: + - bearerAuthManagementToken: [] + bearerAuthJwt: [] +tags: + - name: Authentication + x-traitTag: true + description: | + InfluxDB Management API endpoints require authentication with a management token. + - name: Database tokens + description: Manage database read/write tokens for a cluster + - name: Databases + description: Manage databases for a cluster + - name: Quickstart + x-traitTag: true + description: | + See the product-specific documentation for quickstart examples. + - name: Tables + description: List and delete tables for a database +paths: {} +components: + schemas: + Error: + type: object + properties: + code: + type: integer + message: + type: string + examples: + - code: 400 + message: bad request + - code: 401 + message: unauthorized + - code: 403 + message: forbidden + - code: 404 + message: not found + - code: 409 + message: conflict + - code: 500 + message: internal server error + required: + - code + - message + DateTimeRfc3339: + type: string + format: date-time + examples: + - '2023-12-21T17:32:28Z' + UuidV4: + type: string + format: uuid + examples: + - 11111111-1111-4111-8111-111111111111 + - 22222222-1111-4111-8111-111111111111 + ClusterDatabaseName: + description: The name of the cluster database + type: string + examples: + - DatabaseOne + - DatabaseTwo + maxLength: 64 + minLength: 1 + ClusterDatabaseRetentionPeriod: + description: | + The retention period of the [cluster database](/influxdb/version/admin/databases/) in nanoseconds, if applicable + + If the retention period is not set or is set to 0, the database will have infinite 
retention + type: integer + format: int64 + default: 0 + examples: + - 300000000000 + - 600000000000 + minimum: 0 + ClusterDatabaseMaxTables: + description: The maximum number of tables for the cluster database + type: integer + format: int32 + default: 500 + examples: + - 100 + - 300 + minimum: 1 + ClusterDatabaseMaxColumnsPerTable: + description: The maximum number of columns per table for the cluster database + type: integer + format: int32 + default: 200 + examples: + - 50 + - 150 + minimum: 1 + ClusterDatabasePartitionTemplate: + description: | + A template for [partitioning](/influxdb/version/admin/custom-partitions/) a cluster database. + + Each template part is evaluated in sequence, concatenating the final + partition key from the output of each part, delimited by the partition + key delimiter `|`. + + For example, using the partition template below: + + ```json + [ + { + "type": "time", + "value": "%Y" + }, + { + "type": "tag", + "value": "bananas" + }, + { + "type": "tag", + "value": "plátanos" + }, + { + "type": "bucket", + "value": { + "tagName": "c", + "numberOfBuckets": 10 + } + } + ] + ``` + + The following partition keys are derived: + + * `time=2023-01-01, a=bananas, b=plátanos, c=ananas` -> `2023|bananas|plátanos|5` + * `time=2023-01-01, b=plátanos` -> `2023|!|plátanos|!` + * `time=2023-01-01, another=cat, b=plátanos` -> `2023|!|plátanos|!` + * `time=2023-01-01` -> `2023|!|!|!` + * `time=2023-01-01, a=cat|dog, b=!, c=!` -> `2023|cat%7Cdog|%21|8` + * `time=2023-01-01, a=%50, c=%50` -> `2023|%2550|!|9` + * `time=2023-01-01, a=, c=` -> `2023|^|!|0` + * `time=2023-01-01, a=` -> `2023|#|!|!` + * `time=2023-01-01, c=` -> `2023|!|!|` + + When using the default [partitioning](/influxdb/version/admin/custom-partitions/) template (YYYY-MM-DD) there is no + encoding necessary, as the derived partition key contains a single part, and + no reserved characters. 
[`TemplatePart::Bucket`] parts by definition will + always be within the part length limit and contain no restricted characters + so are also not percent-encoded and/or truncated. + type: array + items: + $ref: '#/components/schemas/ClusterDatabasePartitionTemplatePart' + examples: + - - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + maxItems: 8 + minItems: 1 + uniqueItems: true + ClusterDatabasePartitionTemplatePart: + description: A sub-part of a `PartitionTemplate` + anyOf: + - $ref: '#/components/schemas/ClusterDatabasePartitionTemplatePartTagValue' + - $ref: '#/components/schemas/ClusterDatabasePartitionTemplatePartTimeFormat' + - $ref: '#/components/schemas/ClusterDatabasePartitionTemplatePartBucket' + examples: + - type: time + value: '%Y' + - type: tag + value: bananas + - type: tag + value: plátanos + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + ClusterDatabasePartitionTemplatePartTagValue: + description: | + A tag value matcher that extracts a string value from the specified tag name + + If a row does not contain a value for the specified tag name, the NULL/missing partition key part `!` is rendered. + type: object + properties: + type: + type: string + enum: + - tag + value: + type: string + minLength: 1 + examples: + - type: tag + value: bananas + - type: tag + value: plátanos + ClusterDatabasePartitionTemplatePartTimeFormat: + description: A time format matcher that accepts a "strftime"-like format string and evaluates it against the "time" column + type: object + properties: + type: + type: string + enum: + - time + value: + type: string + minLength: 1 + examples: + - type: time + value: '%Y' + ClusterDatabasePartitionTemplatePartBucket: + description: | + A bucketing matcher that sorts data through a uniform hash function on the values of the given tag name. 
+ + If a row does not contain a value for the specified tag name, the NULL/missing partition key part `!` is rendered. + type: object + properties: + type: + type: string + enum: + - bucket + value: + type: object + properties: + tagName: + description: The name of the tag used to derive the bucket the data belongs in + type: string + minLength: 1 + numberOfBuckets: + description: The number of buckets tag values are distributed across + type: integer + format: int32 + maximum: 100000 + minimum: 1 + examples: + - type: bucket + value: + tagName: c + numberOfBuckets: 10 + ClusterDatabaseTableName: + description: The name of the [cluster database](/influxdb/version/admin/databases/) table + type: string + examples: + - TableOne + - TableTwo + minLength: 1 + DatabaseTokenDescription: + description: The description of the database token + type: string + examples: + - Limited Access Token + - Full Access Token + DatabaseTokenResourceAllDatabases: + description: A resource value for a [database token](/influxdb/version/admin/tokens/database/) permission that refers to all databases + type: string + enum: + - '*' + DatabaseTokenPermissionAction: + description: The action the [database token](/influxdb/version/admin/tokens/database/) permission allows + type: string + DatabaseTokenPermissionResource: + description: The resource the [database token](/influxdb/version/admin/tokens/database/) permission applies to + anyOf: + - $ref: '#/components/schemas/ClusterDatabaseName' + - $ref: '#/components/schemas/DatabaseTokenResourceAllDatabases' + examples: + - DatabaseOne + - DatabaseTwo + - '*' + DatabaseTokenPermission: + description: A permission (action and resource) that the [database token](/influxdb/version/admin/tokens/database/) grants + type: object + properties: + action: + $ref: '#/components/schemas/DatabaseTokenPermissionAction' + resource: + $ref: '#/components/schemas/DatabaseTokenPermissionResource' + examples: + - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + - action: write + resource: '*' + 
DatabaseTokenPermissions: + description: The list of permissions the [database token](/influxdb/version/admin/tokens/database/) allows + type: array + items: + $ref: '#/components/schemas/DatabaseTokenPermission' + examples: + - - action: read + resource: DatabaseOne + - action: write + resource: DatabaseTwo + - - action: write + resource: '*' + DatabaseTokenCreatedAt: + description: | + The date and time that the [database token](/influxdb/version/admin/tokens/database/) was created + + Uses RFC3339 format + $ref: '#/components/schemas/DateTimeRfc3339' + examples: + - '2023-12-21T17:32:28.000Z' + - '2024-03-02T04:20:19.000Z' + DatabaseTokenExpiresAt: + description: | + The date and time that the database token expires, if applicable + + Uses RFC3339 format + $ref: '#/components/schemas/DateTimeRfc3339' + DatabaseTokenRevokedAt: + description: | + The date and time that the database token was revoked, if applicable + + Uses RFC3339 format + $ref: '#/components/schemas/DateTimeRfc3339' + DatabaseTokenAccessToken: + description: | + The access token that can be used to authenticate query and write requests to the cluster + + The access token is never stored by InfluxDB and is only returned once when the token is created. If the access token is lost, a new token must be created. 
+ type: string + examples: + - apiv1_5555555555555555555555555555555555555555555555555555555555555555 + - apiv1_6666666666666666666666666666666666666666666666666666666666666666 + minLength: 64 + responses: + BadRequest: + description: Bad Request + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 400 + $ref: '#/components/schemas/Error' + example: + code: 400 + message: bad request + Unauthorized: + description: Unauthorized + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 401 + $ref: '#/components/schemas/Error' + example: + code: 401 + message: unauthorized + Forbidden: + description: Forbidden + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 403 + $ref: '#/components/schemas/Error' + example: + code: 403 + message: forbidden + NotFound: + description: Not Found + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 404 + $ref: '#/components/schemas/Error' + example: + code: 404 + message: not found + Conflict: + description: Conflict + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 409 + $ref: '#/components/schemas/Error' + example: + code: 409 + message: conflict + InternalServerError: + description: Internal Server Error + content: + application/json: + schema: + properties: + code: + type: integer + enum: + - 500 + $ref: '#/components/schemas/Error' + example: + code: 500 + message: internal server error + NoContent: + description: No Content + securitySchemes: + bearerAuthManagementToken: + type: http + scheme: bearer + bearerFormat: Management Token + bearerAuthJwt: + type: http + scheme: bearer + bearerFormat: JWT +x-tagGroups: + - name: Using the Management API + tags: + - Authentication + - Quickstart + - name: All endpoints + tags: + - Database tokens + - Databases + - Tables diff --git a/api-docs/scripts/apply-overlay.js 
b/api-docs/scripts/apply-overlay.js index 2f57fa68ea..a0a7b28b04 100644 --- a/api-docs/scripts/apply-overlay.js +++ b/api-docs/scripts/apply-overlay.js @@ -115,15 +115,21 @@ function setPath(obj, segments, value) { /** * Deep merge two objects + * Arrays are replaced, not merged. * @param {object} target - Target object * @param {object} source - Source object to merge * @returns {object} - Merged object */ function deepMerge(target, source) { + // Arrays are replaced entirely, not merged + if (Array.isArray(source)) { + return source; + } + if (typeof source !== 'object' || source === null) { return source; } - if (typeof target !== 'object' || target === null) { + if (typeof target !== 'object' || target === null || Array.isArray(target)) { return source; }