git clone https://github.com/vibeforge1111/vibeship-spawner-skills
integrations/salesforce-development/skill.yaml — Salesforce Development Skill
Lightning Web Components, Apex, APIs, and Salesforce DX
id: salesforce-development name: Salesforce Development version: 1.0.0 description: | Expert patterns for Salesforce platform development including Lightning Web Components (LWC), Apex triggers and classes, REST/Bulk APIs, Connected Apps, and Salesforce DX with scratch orgs and 2nd generation packages (2GP).
category: integrations tags:
- salesforce
- lwc
- apex
- crm
- platform
- sfdx
- lightning
triggers:
- "salesforce"
- "sfdc"
- "apex"
- "lwc"
- "lightning web components"
- "sfdx"
- "scratch org"
- "visualforce"
- "soql"
- "governor limits"
- "connected app"
Claude already knows: basic SOQL syntax, general OOP patterns
Claude doesn't know well: governor limit gotchas, LWC reactive patterns, 2GP packaging
patterns:
-
name: "Lightning Web Component with Wire Service" description: | Use @wire decorator for reactive data binding with Lightning Data Service or Apex methods. @wire fits LWC's reactive architecture and enables Salesforce performance optimizations. context:
- "building LWC components"
- "fetching Salesforce data"
- "reactive UI" example: | // myComponent.js import { LightningElement, wire, api } from 'lwc'; import { getRecord, getFieldValue } from 'lightning/uiRecordApi'; import getRelatedRecords from '@salesforce/apex/MyController.getRelatedRecords'; import ACCOUNT_NAME from '@salesforce/schema/Account.Name'; import ACCOUNT_INDUSTRY from '@salesforce/schema/Account.Industry';
const FIELDS = [ACCOUNT_NAME, ACCOUNT_INDUSTRY];
export default class MyComponent extends LightningElement { @api recordId; // Passed from parent or record page
// Wire to Lightning Data Service (preferred for single records) @wire(getRecord, { recordId: '$recordId', fields: FIELDS }) account; // Wire to Apex method (for complex queries) @wire(getRelatedRecords, { accountId: '$recordId' }) wiredRecords({ error, data }) { if (data) { this.relatedRecords = data; this.error = undefined; } else if (error) { this.error = error; this.relatedRecords = undefined; } } get accountName() { return getFieldValue(this.account.data, ACCOUNT_NAME); } get isLoading() { return !this.account.data && !this.account.error; } // Reactive: changing recordId automatically re-fetches}
// myComponent.html <template> <lightning-card title={accountName}> <template if:true={isLoading}> <lightning-spinner alternative-text="Loading"></lightning-spinner> </template> <template if:true={account.data}> <p>Industry: {industry}</p> </template> <template if:true={error}> <p class="slds-text-color_error">{error.body.message}</p> </template> </lightning-card> </template>
// MyController.cls public with sharing class MyController { @AuraEnabled(cacheable=true) public static List<Contact> getRelatedRecords(Id accountId) { return [ SELECT Id, Name, Email, Phone FROM Contact WHERE AccountId = :accountId WITH SECURITY_ENFORCED LIMIT 100 ]; } }
-
name: "Bulkified Apex Trigger with Handler Pattern" description: | Apex triggers must be bulkified to handle 200+ records per transaction. Use handler pattern for separation of concerns, testability, and recursion prevention. context:
- "apex triggers"
- "data operations"
- "automation" example: | // AccountTrigger.trigger trigger AccountTrigger on Account ( before insert, before update, before delete, after insert, after update, after delete, after undelete ) { new AccountTriggerHandler().run(); }
// TriggerHandler.cls (base class) public virtual class TriggerHandler { // Recursion prevention private static Set<String> executedHandlers = new Set<String>();
public void run() { String handlerName = String.valueOf(this).split(':')[0]; // Prevent recursion String contextKey = handlerName + '_' + Trigger.operationType; if (executedHandlers.contains(contextKey)) { return; } executedHandlers.add(contextKey); switch on Trigger.operationType { when BEFORE_INSERT { this.beforeInsert(); } when BEFORE_UPDATE { this.beforeUpdate(); } when BEFORE_DELETE { this.beforeDelete(); } when AFTER_INSERT { this.afterInsert(); } when AFTER_UPDATE { this.afterUpdate(); } when AFTER_DELETE { this.afterDelete(); } when AFTER_UNDELETE { this.afterUndelete(); } } } // Override in child classes protected virtual void beforeInsert() {} protected virtual void beforeUpdate() {} protected virtual void beforeDelete() {} protected virtual void afterInsert() {} protected virtual void afterUpdate() {} protected virtual void afterDelete() {} protected virtual void afterUndelete() {}}
// AccountTriggerHandler.cls public class AccountTriggerHandler extends TriggerHandler { private List<Account> newAccounts; private List<Account> oldAccounts; private Map<Id, Account> newMap; private Map<Id, Account> oldMap;
public AccountTriggerHandler() { this.newAccounts = (List<Account>) Trigger.new; this.oldAccounts = (List<Account>) Trigger.old; this.newMap = (Map<Id, Account>) Trigger.newMap; this.oldMap = (Map<Id, Account>) Trigger.oldMap; } protected override void afterInsert() { createDefaultContacts(); notifySlack(); } protected override void afterUpdate() { handleIndustryChange(); } // BULKIFIED: Query once, update once private void createDefaultContacts() { List<Contact> contactsToInsert = new List<Contact>(); for (Account acc : newAccounts) { if (acc.Type == 'Prospect') { contactsToInsert.add(new Contact( AccountId = acc.Id, LastName = 'Primary Contact', Email = 'contact@' + acc.Website )); } } if (!contactsToInsert.isEmpty()) { insert contactsToInsert; // Single DML for all } } private void handleIndustryChange() { Set<Id> changedAccountIds = new Set<Id>(); for (Account acc : newAccounts) { Account oldAcc = oldMap.get(acc.Id); if (acc.Industry != oldAcc.Industry) { changedAccountIds.add(acc.Id); } } if (!changedAccountIds.isEmpty()) { // Queue async processing for heavy work System.enqueueJob(new IndustryChangeQueueable(changedAccountIds)); } } private void notifySlack() { // Offload callouts to async List<Id> accountIds = new List<Id>(newMap.keySet()); System.enqueueJob(new SlackNotificationQueueable(accountIds)); }}
-
name: "Queueable Apex for Async Processing" description: | Use Queueable Apex for async processing with support for non-primitive types, monitoring via AsyncApexJob, and job chaining. Limit: 50 jobs per transaction, 1 child job when chaining. context:
-
"async processing"
-
"long-running operations"
-
"callouts from triggers" example: | // IndustryChangeQueueable.cls public class IndustryChangeQueueable implements Queueable, Database.AllowsCallouts { private Set<Id> accountIds; private Integer retryCount;
public IndustryChangeQueueable(Set<Id> accountIds) { this(accountIds, 0); }
public IndustryChangeQueueable(Set<Id> accountIds, Integer retryCount) { this.accountIds = accountIds; this.retryCount = retryCount; }
public void execute(QueueableContext context) { try { // Query with fresh data List<Account> accounts = [ SELECT Id, Name, Industry, OwnerId FROM Account WHERE Id IN :accountIds WITH SECURITY_ENFORCED ];
// Process and make callout for (Account acc : accounts) { syncToExternalSystem(acc); } // Update records updateRelatedOpportunities(accountIds);} catch (Exception e) { handleError(e); } }
private void syncToExternalSystem(Account acc) { HttpRequest req = new HttpRequest(); req.setEndpoint('callout:ExternalCRM/accounts'); req.setMethod('POST'); req.setHeader('Content-Type', 'application/json'); req.setBody(JSON.serialize(new Map<String, Object>{ 'salesforceId' => acc.Id, 'name' => acc.Name, 'industry' => acc.Industry }));
Http http = new Http(); HttpResponse res = http.send(req);
if (res.getStatusCode() != 200 && res.getStatusCode() != 201) { throw new CalloutException('Sync failed: ' + res.getBody()); } }
private void updateRelatedOpportunities(Set<Id> accIds) { List<Opportunity> oppsToUpdate = [ SELECT Id, Industry__c, AccountId FROM Opportunity WHERE AccountId IN :accIds WITH SECURITY_ENFORCED ];
Map<Id, Account> accountMap = new Map<Id, Account>([ SELECT Id, Industry FROM Account WHERE Id IN :accIds ]);
for (Opportunity opp : oppsToUpdate) { opp.Industry__c = accountMap.get(opp.AccountId).Industry; }
if (!oppsToUpdate.isEmpty()) { update oppsToUpdate; } }
private void handleError(Exception e) { // Log error System.debug(LoggingLevel.ERROR, 'Queueable failed: ' + e.getMessage());
// Retry with exponential backoff (max 3 retries) if (retryCount < 3) { // Chain new job for retry System.enqueueJob(new IndustryChangeQueueable(accountIds, retryCount + 1)); } else { // Create error record for monitoring insert new Integration_Error__c( Type__c = 'Industry Sync', Message__c = e.getMessage(), Stack_Trace__c = e.getStackTraceString(), Record_Ids__c = String.join(new List<Id>(accountIds), ',') ); } } }
-
name: "REST API Integration with Connected App" description: | External integrations use Connected Apps with OAuth 2.0. JWT Bearer flow for server-to-server, Web Server flow for user-facing apps. Always use Named Credentials for secure callout configuration. context:
- "external integration"
- "REST API access"
- "connected apps" example: | // Node.js - JWT Bearer Flow (server-to-server) import jwt from 'jsonwebtoken'; import fs from 'fs';
class SalesforceClient { private accessToken: string | null = null; private instanceUrl: string | null = null; private tokenExpiry: number = 0;
constructor( private clientId: string, private username: string, private privateKeyPath: string, private loginUrl: string = 'https://login.salesforce.com' ) {} async authenticate(): Promise<void> { // Check if token is still valid (5 min buffer) if (this.accessToken && Date.now() < this.tokenExpiry - 300000) { return; } const privateKey = fs.readFileSync(this.privateKeyPath, 'utf8'); // Create JWT assertion const claim = { iss: this.clientId, sub: this.username, aud: this.loginUrl, exp: Math.floor(Date.now() / 1000) + 300 // 5 minutes }; const assertion = jwt.sign(claim, privateKey, { algorithm: 'RS256' }); // Exchange JWT for access token const response = await fetch(`${this.loginUrl}/services/oauth2/token`, { method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, body: new URLSearchParams({ grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', assertion }) }); if (!response.ok) { const error = await response.json(); throw new Error(`Auth failed: ${error.error_description}`); } const data = await response.json(); this.accessToken = data.access_token; this.instanceUrl = data.instance_url; this.tokenExpiry = Date.now() + 7200000; // 2 hours } async query(soql: string): Promise<any> { await this.authenticate(); const response = await fetch( `${this.instanceUrl}/services/data/v59.0/query?q=${encodeURIComponent(soql)}`, { headers: { 'Authorization': `Bearer ${this.accessToken}`, 'Content-Type': 'application/json' } } ); if (!response.ok) { await this.handleError(response); } return response.json(); } async createRecord(sobject: string, data: object): Promise<any> { await this.authenticate(); const response = await fetch( `${this.instanceUrl}/services/data/v59.0/sobjects/${sobject}`, { method: 'POST', headers: { 'Authorization': `Bearer ${this.accessToken}`, 'Content-Type': 'application/json' }, body: JSON.stringify(data) } ); if (!response.ok) { await this.handleError(response); } return response.json(); } private async 
handleError(response: Response): Promise<never> { const error = await response.json(); if (response.status === 401) { // Token expired, clear and retry this.accessToken = null; throw new Error('Session expired, retry required'); } throw new Error(`API Error: ${JSON.stringify(error)}`); }}
// Usage const sf = new SalesforceClient( process.env.SF_CLIENT_ID!, process.env.SF_USERNAME!, './certificates/server.key' );
const accounts = await sf.query( "SELECT Id, Name FROM Account WHERE CreatedDate = TODAY" );
-
name: "Bulk API 2.0 for Large Data Operations" description: | Use Bulk API 2.0 for operations on 10K+ records. Asynchronous processing with job-based workflow. Part of REST API with streamlined interface compared to original Bulk API. context:
-
"large data volumes"
-
"data migration"
-
"bulk operations" example: | // Node.js - Bulk API 2.0 insert class SalesforceBulkClient extends SalesforceClient {
async bulkInsert(sobject: string, records: object[]): Promise<any> { await this.authenticate();
// Step 1: Create job const job = await this.createBulkJob(sobject, 'insert');
try { // Step 2: Upload data (CSV format) await this.uploadJobData(job.id, records);
// Step 3: Close job to start processing await this.closeJob(job.id); // Step 4: Poll for completion return await this.waitForJobCompletion(job.id);} catch (error) { // Abort job on error await this.abortJob(job.id); throw error; } }
private async createBulkJob(sobject: string, operation: string): Promise<any> { const response = await fetch( `${this.instanceUrl}/services/data/v59.0/jobs/ingest`, { method: 'POST', headers: { 'Authorization': `Bearer ${this.accessToken}`, 'Content-Type': 'application/json' }, body: JSON.stringify({ object: sobject, operation, contentType: 'CSV', lineEnding: 'LF' }) } ); return response.json(); }
private async uploadJobData(jobId: string, records: object[]): Promise<void> { // Convert to CSV const csv = this.recordsToCSV(records);
await fetch( `${this.instanceUrl}/services/data/v59.0/jobs/ingest/${jobId}/batches`, { method: 'PUT', headers: { 'Authorization': `Bearer ${this.accessToken}`, 'Content-Type': 'text/csv' }, body: csv } ); }
private async closeJob(jobId: string): Promise<void> { await fetch( `${this.instanceUrl}/services/data/v59.0/jobs/ingest/${jobId}`, { method: 'PATCH', headers: { 'Authorization': `Bearer ${this.accessToken}`, 'Content-Type': 'application/json' }, body: JSON.stringify({ state: 'UploadComplete' }) } ); }
private async waitForJobCompletion(jobId: string): Promise<any> { const maxWaitTime = 10 * 60 * 1000; // 10 minutes const pollInterval = 5000; // 5 seconds const startTime = Date.now();
while (Date.now() - startTime < maxWaitTime) { const response = await fetch( `${this.instanceUrl}/services/data/v59.0/jobs/ingest/${jobId}`, { headers: { 'Authorization': `Bearer ${this.accessToken}` } } ); const job = await response.json(); if (job.state === 'JobComplete') { // Get results return { success: job.numberRecordsProcessed - job.numberRecordsFailed, failed: job.numberRecordsFailed, failedResults: job.numberRecordsFailed > 0 ? await this.getFailedResults(jobId) : [] }; } if (job.state === 'Failed' || job.state === 'Aborted') { throw new Error(`Bulk job failed: ${job.state}`); } await new Promise(r => setTimeout(r, pollInterval));}
throw new Error('Bulk job timeout'); }
private async getFailedResults(jobId: string): Promise<any[]> { const response = await fetch( `${this.instanceUrl}/services/data/v59.0/jobs/ingest/${jobId}/failedResults`, { headers: { 'Authorization': `Bearer ${this.accessToken}` } } ); const csv = await response.text(); return this.parseCSV(csv); }
private recordsToCSV(records: object[]): string { if (records.length === 0) return '';
const headers = Object.keys(records[0]); const rows = records.map(r => headers.map(h => this.escapeCSV(r[h])).join(',') );
return [headers.join(','), ...rows].join('\n'); }
private escapeCSV(value: any): string { if (value === null || value === undefined) return ''; const str = String(value); if (str.includes(',') || str.includes('"') || str.includes('\n')) { return `"${str.replace(/"/g, '""')}"`; } return str; } }
-
name: "Salesforce DX with Scratch Orgs" description: | Source-driven development with disposable scratch orgs for isolated testing. Scratch orgs exist 7-30 days and can be created throughout the day, unlike sandbox refresh limits. context:
- "development workflow"
- "CI/CD"
- "testing" example: | // project-scratch-def.json - Scratch org definition { "orgName": "MyApp Dev Org", "edition": "Developer", "features": ["EnableSetPasswordInApi", "Communities"], "settings": { "lightningExperienceSettings": { "enableS1DesktopEnabled": true }, "mobileSettings": { "enableS1EncryptedStoragePref2": false }, "securitySettings": { "passwordPolicies": { "enableSetPasswordInApi": true } } } }
// sfdx-project.json - Project configuration { "packageDirectories": [ { "path": "force-app", "default": true, "package": "MyPackage", "versionName": "ver 1.0", "versionNumber": "1.0.0.NEXT", "dependencies": [ { "package": "SomePackage@2.0.0" } ] } ], "namespace": "myns", "sfdcLoginUrl": "https://login.salesforce.com", "sourceApiVersion": "59.0" }
Development workflow commands
1. Create scratch org
sf org create scratch
--definition-file config/project-scratch-def.json
--alias myapp-dev
--duration-days 7
--set-default
2. Push source to scratch org
sf project deploy start --target-org myapp-dev
3. Assign permission set
sf org assign permset --name MyApp_Admin --target-org myapp-dev
4. Import sample data
sf data import tree --plan data/sample-data-plan.json --target-org myapp-dev
5. Open org
sf org open --target-org myapp-dev
6. Run tests
sf apex run test
--code-coverage
--result-format human
--wait 10
--target-org myapp-dev
7. Pull changes back
sf project retrieve start --target-org myapp-dev
-
name: "2nd Generation Package (2GP) Development" description: | 2GP replaces 1GP with source-driven, modular packaging. Requires Dev Hub with 2GP enabled, namespace linked, and 75% code coverage for promoted packages. context:
- "packaging"
- "ISV development"
- "AppExchange" example: |
Enable Dev Hub and 2GP in Setup:
Setup > Dev Hub > Enable Dev Hub
Setup > Dev Hub > Enable Unlocked Packages and 2GP
Link namespace (required for managed packages)
sf package create
--name "MyManagedPackage"
--package-type Managed
--path force-app
--target-dev-hub DevHub
Create package version (beta)
sf package version create
--package "MyManagedPackage"
--installation-key-bypass
--wait 30
--code-coverage
--target-dev-hub DevHub
Check version status
sf package version list --packages "MyManagedPackage" --target-dev-hub DevHub
Promote to released (requires 75% coverage)
sf package version promote
--package "MyManagedPackage@1.0.0-1"
--target-dev-hub DevHub
Install in sandbox for testing
sf package install
--package "MyManagedPackage@1.0.0-1"
--target-org MySandbox
--wait 20
CI/CD Pipeline (GitHub Actions)
.github/workflows/salesforce-ci.yml
name: Salesforce CI
on: push: branches: [main, develop] pull_request: branches: [main]
jobs: validate: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4
- name: Install Salesforce CLI run: npm install -g @salesforce/cli - name: Authenticate Dev Hub run: | echo "${{ secrets.SFDX_AUTH_URL }}" > auth.txt sf org login sfdx-url --sfdx-url-file auth.txt --alias DevHub --set-default-dev-hub - name: Create Scratch Org run: | sf org create scratch \ --definition-file config/project-scratch-def.json \ --alias ci-scratch \ --duration-days 1 \ --set-default - name: Deploy Source run: sf project deploy start --target-org ci-scratch - name: Run Tests run: | sf apex run test \ --code-coverage \ --result-format human \ --wait 20 \ --target-org ci-scratch - name: Delete Scratch Org if: always() run: sf org delete scratch --target-org ci-scratch --no-prompt
anti_patterns:
-
name: "SOQL Inside Loops" description: "Queries in loops hit the 100 SOQL limit instantly with bulk data" example: | // WRONG: Hits 100 SOQL limit with 100 records for (Account acc : accounts) { List<Contact> contacts = [SELECT Id FROM Contact WHERE AccountId = :acc.Id]; }
// RIGHT: Query once outside loop Map<Id, List<Contact>> contactsByAccount = new Map<Id, List<Contact>>(); for (Contact c : [SELECT Id, AccountId FROM Contact WHERE AccountId IN :accountIds]) { if (!contactsByAccount.containsKey(c.AccountId)) { contactsByAccount.put(c.AccountId, new List<Contact>()); } contactsByAccount.get(c.AccountId).add(c); }
-
name: "DML Inside Loops" description: "Each DML in loop counts toward 150 limit" example: | // WRONG: 150 DML limit hit with 150 records for (Account acc : accounts) { acc.Description = 'Updated'; update acc; }
// RIGHT: Single DML outside loop for (Account acc : accounts) { acc.Description = 'Updated'; } update accounts;
-
name: "Hardcoding IDs" description: "Record IDs differ between orgs" example: | // WRONG: Hardcoded record type ID account.RecordTypeId = '012000000000001AAA';
// RIGHT: Query by developer name Id rtId = Schema.SObjectType.Account .getRecordTypeInfosByDeveloperName() .get('Business_Account') .getRecordTypeId();
-
name: "Mixing Concerns in Trigger" description: "Logic in trigger body is hard to test and maintain" example: | // WRONG: All logic in trigger trigger AccountTrigger on Account (after insert) { // 200 lines of business logic... }
// RIGHT: Delegate to handler trigger AccountTrigger on Account (after insert) { new AccountTriggerHandler().run(); }
-
name: "Ignoring Security" description: "Not checking CRUD/FLS permissions" example: | // WRONG: No security check List<Account> accounts = [SELECT Id, Name FROM Account];
// RIGHT: WITH SECURITY_ENFORCED List<Account> accounts = [ SELECT Id, Name FROM Account WITH SECURITY_ENFORCED ];
// Or check programmatically if (!Schema.sObjectType.Account.isAccessible()) { throw new AuraHandledException('Insufficient permissions'); }
references:
- title: "LWC Developer Guide" url: "https://developer.salesforce.com/docs/platform/lwc/guide"
- title: "Apex Developer Guide" url: "https://developer.salesforce.com/docs/atlas.en-us.apexcode.meta"
- title: "REST API Developer Guide" url: "https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta"
- title: "Salesforce DX Developer Guide" url: "https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta"
- title: "Governor Limits Quick Reference" url: "https://resources.docs.salesforce.com/latest/latest/en-us/sfdc/pdf/salesforce_app_limits_cheatsheet.pdf"