Harness Engineering — harness-data-pipeline skill
name: harness-data-pipeline
Install
Source: clone the upstream repo
git clone https://github.com/Intense-Visions/harness-engineering
manifest:
agents/skills/claude-code/harness-data-pipeline/skill.yaml — source content
---
# Skill manifest for harness-data-pipeline: ETL/ELT review and validation
# skill for the Harness engineering toolchain. Reconstructed into valid
# block-style YAML (the source had multiple keys collapsed onto single lines).
name: harness-data-pipeline
version: "1.0.0"
description: >-
  ETL/ELT patterns, data quality checks, pipeline testing, and data
  workflow management
stability: static
cognitive_mode: meticulous-verifier

# Events that may launch the skill.
triggers:
  - manual
  - on_pr
  - on_commit

# Agent platforms this skill supports.
platforms:
  - claude-code
  - gemini-cli
  - cursor
  - codex

# Tools the skill is permitted to invoke.
tools:
  - Bash
  - Read
  - Write
  - Edit
  - Glob
  - Grep
  - emit_interaction

# Command-line entry point and its optional arguments.
cli:
  command: harness skill run harness-data-pipeline
  args:
    - name: path
      description: Project root path
      required: false
    - name: framework
      # Quoted: the value contains ": " which would otherwise start a mapping.
      description: "Pipeline framework: dbt, airflow, dagster, prefect. Auto-detected when omitted."
      required: false
    - name: check-quality
      description: Run data quality validation rules against pipeline definitions
      required: false

# MCP tool binding for programmatic invocation.
mcp:
  tool: run_skill
  input:
    skill: harness-data-pipeline
    path: string

type: rigid
tier: 3
internal: false

# Discovery keywords for skill routing/search.
keywords:
  - data pipeline
  - ETL
  - ELT
  - data quality
  - pipeline testing
  - Airflow
  - dbt
  - Dagster
  - Prefect
  - data transformation
  - data ingestion
  - data warehouse
  - BigQuery
  - Snowflake

# File/directory patterns that signal this skill applies to a repo.
stack_signals:
  - "dbt/"
  - "dbt_project.yml"
  - "airflow/"
  - "dags/"
  - "dagster/"
  - "pipelines/"
  - "etl/"
  # NOTE(review): double slash looks like a mangled glob
  # (src/**/transforms/ ?) — confirm against the upstream repo.
  - "src//transforms/"
  - "models/"

# Execution phases, run in order; all are required.
phases:
  - name: detect
    description: Identify pipeline framework, DAG structure, data sources, and sink targets
    required: true
  - name: analyze
    description: Evaluate pipeline patterns, dependency graphs, idempotency, and error handling
    required: true
  - name: validate
    description: Check data quality rules, schema contracts, freshness SLAs, and test coverage
    required: true
  - name: document
    description: Generate pipeline documentation, lineage diagrams, and quality check reports
    required: true

# Skill keeps no state between runs and has no dependencies on other skills.
state:
  persistent: false
  files: []
depends_on: []