Claude-code-plugins-plus-skills notion-local-dev-loop
install
source · Clone the upstream repo
git clone https://github.com/jeremylongshore/claude-code-plugins-plus-skills
Claude Code · Install into ~/.claude/skills/
T=$(mktemp -d) && git clone --depth=1 https://github.com/jeremylongshore/claude-code-plugins-plus-skills "$T" && mkdir -p ~/.claude/skills && cp -r "$T/plugins/saas-packs/notion-pack/skills/notion-local-dev-loop" ~/.claude/skills/jeremylongshore-claude-code-plugins-plus-skills-notion-local-dev-loop && rm -rf "$T"
manifest:
`plugins/saas-packs/notion-pack/skills/notion-local-dev-loop/SKILL.md` — source content
Notion Local Dev Loop
Overview
Set up a fast, reproducible local development workflow for Notion integrations. This skill covers creating a dedicated dev integration with its own token, structuring the project for testability, mocking the Notion SDK in unit tests, and running integration tests against a sandboxed dev workspace. The approach keeps production data safe while enabling rapid iteration.
Prerequisites
- Completed `notion-install-auth` setup (you have a working Notion integration)
- Node.js 18+ with npm/pnpm, or Python 3.10+
- A Notion workspace where you can create test pages and databases
Instructions
Step 1: Create a Dev Integration and Workspace Sandbox
Create a separate integration exclusively for development. This prevents accidental writes to production data.
- Go to Settings & Members > Connections > Develop or manage integrations (or visit developers.notion.com)
- Click New integration and name it `My App — Dev`
- Copy the token (starts with `ntn_`) into `.env.development`
- Create a dedicated Dev Workspace page (or a top-level "Dev Testing" page) and share it with the dev integration
- Inside that page, create test databases that mirror your production schema
# .env.development — git-ignored, dev only NOTION_TOKEN=ntn_dev_xxxxxxxxxxxxxxxxxxxx NOTION_TEST_DATABASE_ID=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee NOTION_TEST_PAGE_ID=ffffffff-0000-1111-2222-333333333333 # .env.example — commit this as a template NOTION_TOKEN=ntn_your_dev_token_here NOTION_TEST_DATABASE_ID=your_test_db_id NOTION_TEST_PAGE_ID=your_test_page_id
Project structure:
my-notion-project/ ├── src/ │ ├── notion/ │ │ ├── client.ts # Singleton with retry + rate-limit awareness │ │ ├── queries.ts # Database query wrappers │ │ └── helpers.ts # Property extractors, rich text builders │ └── index.ts ├── tests/ │ ├── unit/ │ │ └── notion.test.ts # Mocked SDK tests │ └── integration/ │ └── notion.test.ts # Live API tests (gated) ├── .env.development # Dev token (git-ignored) ├── .env.example # Template for team ├── .gitignore ├── package.json ├── tsconfig.json └── vitest.config.ts
Step 2: Configure the Client with Retry and Rate-Limit Handling
The Notion API enforces a hard limit of 3 requests per second across all pricing tiers. Build retry logic into your client from day one.
// src/notion/client.ts import { Client, LogLevel, isNotionClientError, APIResponseError } from '@notionhq/client'; let instance: Client | null = null; export function getNotionClient(): Client { if (!instance) { instance = new Client({ auth: process.env.NOTION_TOKEN, // SDK reads NOTION_TOKEN automatically if omitted logLevel: process.env.NODE_ENV === 'development' ? LogLevel.DEBUG : LogLevel.WARN, // baseUrl can be overridden for proxy/mock servers: // baseUrl: process.env.NOTION_BASE_URL || 'https://api.notion.com', }); } return instance; } // Retry wrapper with exponential backoff for rate limits export async function withRetry<T>( fn: () => Promise<T>, maxRetries = 3 ): Promise<T> { for (let attempt = 0; attempt <= maxRetries; attempt++) { try { return await fn(); } catch (error) { if ( isNotionClientError(error) && error instanceof APIResponseError && error.status === 429 && attempt < maxRetries ) { const retryAfter = parseInt(error.headers?.get('retry-after') || '1', 10); const delay = retryAfter * 1000 * Math.pow(2, attempt); console.warn(`Rate limited. Retrying in ${delay}ms (attempt ${attempt + 1}/${maxRetries})`); await new Promise(resolve => setTimeout(resolve, delay)); continue; } throw error; } } throw new Error('Unreachable'); }
{ "scripts": { "dev": "tsx watch src/index.ts", "dev:debug": "NOTION_LOG_LEVEL=debug tsx watch src/index.ts", "test": "vitest", "test:watch": "vitest --watch", "test:integration": "INTEGRATION=true vitest run tests/integration/", "typecheck": "tsc --noEmit" }, "dependencies": { "@notionhq/client": "^2.2.0" }, "devDependencies": { "tsx": "^4.0.0", "typescript": "^5.0.0", "vitest": "^2.0.0", "dotenv": "^16.0.0" } }
Step 3: Write Unit Tests with Mocked SDK and Integration Tests
Unit tests mock the entire
@notionhq/client module so they run instantly with no network calls. Integration tests hit the real API but are gated behind an environment variable and target only the dev workspace.
// tests/unit/notion.test.ts
//
// Unit tests with the entire @notionhq/client module replaced by mocks:
// no network calls are made, so these run instantly.
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { Client } from '@notionhq/client';

// vitest hoists vi.mock above the imports, so the `Client` imported above
// is already the mocked constructor. Every SDK surface the code under
// test may touch is stubbed with vi.fn().
vi.mock('@notionhq/client', () => ({
  Client: vi.fn().mockImplementation(() => ({
    databases: {
      query: vi.fn(),
      retrieve: vi.fn(),
      create: vi.fn(),
      update: vi.fn(),
    },
    pages: {
      create: vi.fn(),
      update: vi.fn(),
      retrieve: vi.fn(),
    },
    blocks: {
      children: { list: vi.fn(), append: vi.fn() },
      retrieve: vi.fn(),
      update: vi.fn(),
      delete: vi.fn(),
    },
    search: vi.fn(),
    users: { list: vi.fn(), retrieve: vi.fn() },
  })),
  // Mirror the real helper's shape: Notion client errors carry a `code`.
  isNotionClientError: vi.fn((err) => err?.code !== undefined),
  LogLevel: { DEBUG: 'debug', WARN: 'warn' },
}));

describe('Database queries', () => {
  let notion: InstanceType<typeof Client>;

  beforeEach(() => {
    // Fresh mocked client per test so call counts don't leak between tests.
    notion = new Client({ auth: 'ntn_test_token' });
  });

  it('queries database with a status filter', async () => {
    // Minimal response shaped like a real databases.query payload.
    const mockResponse = {
      results: [
        {
          id: 'page-1',
          properties: {
            Name: { type: 'title', title: [{ plain_text: 'Task 1' }] },
            Status: { type: 'select', select: { name: 'Done' } },
          },
        },
      ],
      has_more: false,
      next_cursor: null,
    };
    (notion.databases.query as ReturnType<typeof vi.fn>).mockResolvedValue(mockResponse);

    const result = await notion.databases.query({
      database_id: 'test-db-id',
      filter: { property: 'Status', select: { equals: 'Done' } },
    });

    expect(result.results).toHaveLength(1);
    // Assert the filter was forwarded unchanged to the SDK.
    expect(notion.databases.query).toHaveBeenCalledWith(
      expect.objectContaining({
        filter: { property: 'Status', select: { equals: 'Done' } },
      })
    );
  });

  it('handles pagination across multiple pages', async () => {
    const queryMock = notion.databases.query as ReturnType<typeof vi.fn>;
    // First call reports has_more with a cursor; second call is the last page.
    queryMock
      .mockResolvedValueOnce({ results: [{ id: '1' }], has_more: true, next_cursor: 'cursor-abc' })
      .mockResolvedValueOnce({ results: [{ id: '2' }], has_more: false, next_cursor: null });

    const page1 = await notion.databases.query({ database_id: 'db' });
    expect(page1.has_more).toBe(true);

    // Second request passes the cursor from the first page.
    const page2 = await notion.databases.query({
      database_id: 'db',
      start_cursor: page1.next_cursor,
    });
    expect(page2.has_more).toBe(false);
    expect(queryMock).toHaveBeenCalledTimes(2);
  });
});
// tests/integration/notion.test.ts import { describe, it, expect } from 'vitest'; import { Client } from '@notionhq/client'; const SKIP = !process.env.INTEGRATION; describe.skipIf(SKIP)('Notion Integration (live API)', () => { const notion = new Client({ auth: process.env.NOTION_TOKEN! }); const testDbId = process.env.NOTION_TEST_DATABASE_ID!; it('connects and lists workspace users', async () => { const { results } = await notion.users.list({}); expect(results.length).toBeGreaterThan(0); }); it('queries the test database', async () => { const response = await notion.databases.query({ database_id: testDbId, page_size: 1, }); expect(response.results).toBeDefined(); }); it('creates and archives a test page (cleanup)', async () => { const page = await notion.pages.create({ parent: { database_id: testDbId }, properties: { Name: { title: [{ text: { content: `DevLoop Test ${Date.now()}` } }] }, }, }); expect(page.id).toBeTruthy(); // Always clean up await notion.pages.update({ page_id: page.id, archived: true }); }); });
Vitest configuration:
// vitest.config.ts import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { globals: true, setupFiles: ['dotenv/config'], testTimeout: 30_000, // Notion API can be slow under rate limits include: ['tests/**/*.test.ts'], }, });
Output
After completing these steps you will have:
- A dedicated dev integration with its own token, isolated from production
- A singleton client with built-in retry logic for the 3 req/s rate limit
- Unit tests that run instantly using mocked `@notionhq/client`
- Integration tests gated behind `INTEGRATION=true`, targeting dev-only pages
- Hot reload via `tsx watch` for rapid iteration
- Type checking via `tsc --noEmit`
Error Handling
| Error | Cause | Solution |
|---|---|---|
| `NOTION_TOKEN` undefined | `.env.development` missing or not loaded | Run `cp .env.example .env.development` and fill in dev token |
| `401 unauthorized` | Token invalid or integration not connected to page | Re-share the dev page with the dev integration |
| `404 object_not_found` (database/page) | Test DB not shared with dev integration | Open DB in Notion > `...` > Connections > add your dev integration |
| Mock not intercepting calls | `vi.mock` not at file top level | Move `vi.mock` above all imports |
| `429 rate_limited` | Exceeded 3 req/s | Use `withRetry` wrapper; add delay between batch operations |
| Integration tests timeout | Slow API under rate limits | Increase `testTimeout` in vitest config; reduce test data volume |
| `ECONNREFUSED` connection refused | Proxy or mock server not running | Verify proxy is up; remove `baseUrl` override for direct API access |
Examples
TypeScript: Quick Connection Test
import { Client } from '@notionhq/client'; const notion = new Client({ auth: process.env.NOTION_TOKEN }); async function smokeTest() { const { results } = await notion.users.list({}); console.log(`Connected. ${results.length} user(s) in workspace.`); // Verify dev database access const db = await notion.databases.retrieve({ database_id: process.env.NOTION_TEST_DATABASE_ID!, }); console.log(`Dev database: "${(db as any).title?.[0]?.plain_text || db.id}"`); } smokeTest().catch(console.error);
Python: Dev Environment with notion-client
import os

from dotenv import load_dotenv
from notion_client import Client

# Load the dev-only environment (token + test IDs). Keep this file
# git-ignored so credentials never land in the repo.
load_dotenv(".env.development")

notion = Client(auth=os.environ["NOTION_TOKEN"])

# Quick smoke test
users = notion.users.list()
print(f"Connected. {len(users['results'])} user(s) in workspace.")

# Query dev database
db_id = os.environ["NOTION_TEST_DATABASE_ID"]
results = notion.databases.query(database_id=db_id, page_size=1)
print(f"Dev database has {len(results['results'])} page(s) (showing 1)")


# Mock example for pytest (uses pytest-mock's `mocker` fixture)
def test_query_with_mock(mocker):
    """Unit test with the SDK fully mocked — no network traffic."""
    mock_notion = mocker.patch("notion_client.Client")
    mock_notion.return_value.databases.query.return_value = {
        "results": [{"id": "page-1"}],
        "has_more": False,
        "next_cursor": None,
    }
    # Instantiate via the patched constructor. The module-level `Client`
    # name was bound at import time, so calling it directly would bypass
    # the patch and hit the real SDK.
    client = mock_notion(auth="ntn_test")
    result = client.databases.query(database_id="test-db")
    assert len(result["results"]) == 1
Resources
- @notionhq/client (npm) — official Node.js SDK
- notion-sdk-py (PyPI) — official Python SDK
- Notion API Rate Limits — 3 req/s across all tiers
- Notion API Errors — status codes and retry guidance
- Vitest Mocking Guide — `vi.mock` patterns for SDK mocking
Next Steps
See `notion-sdk-patterns` for production-ready query helpers, pagination utilities, and property extraction functions.